Merge tracemonkey to mozilla-central.

This commit is contained in:
Robert Sayre 2010-10-18 12:01:28 -05:00
Parents ca9ab173db e444f80e5c
Commit aa68c211ca
60 changed files with 2955 additions and 8318 deletions

View file

@ -855,7 +855,7 @@ interface jsdIStackFrame : jsdIEphemeral
* Script object. In JavaScript engine terms, there's a single script for each
* function, and one for the top level script.
*/
[scriptable, uuid(18e09893-f461-4b4b-94d3-776fb0069c6f)]
[scriptable, uuid(53dadd96-69f6-4846-8958-cc8eaa3f9f09)]
interface jsdIScript : jsdIEphemeral
{
/** Internal use only. */

View file

@ -1,309 +0,0 @@
/* vim: set sw=4 ts=4 et tw=78: */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is the Narcissus JavaScript engine.
*
* The Initial Developer of the Original Code is
* Brendan Eich <brendan@mozilla.org>.
* Portions created by the Initial Developer are Copyright (C) 2004
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
/*
* Narcissus - JS implemented in JS.
*
* Well-known constants and lookup tables. Many consts are generated from the
* tokens table via eval to minimize redundancy, so consumers must be compiled
* separately to take advantage of the simple switch-case constant propagation
* done by SpiderMonkey.
*/
(function() {
var builderTypes = Object.create(null, {
"default": { value: function() {
return new narcissus.parser.DefaultBuilder;
} },
"ssa": { value: function() {
return new narcissus.parser.SSABuilder;
} }
});
var builderType;
var narcissus = {
options: {
version: 185,
get builderType() { return builderType },
set builderType(type) {
var ctor = builderTypes[type];
if (!ctor)
throw new Error("expected builder type ('default' or 'ssa'), got " + type);
builderType = type;
narcissus.definitions.Builder = ctor;
}
},
hostGlobal: this
};
Narcissus = narcissus;
})();
Narcissus.definitions = (function() {
var tokens = [
// End of source.
"END",
// Operators and punctuators. Some pair-wise order matters, e.g. (+, -)
// and (UNARY_PLUS, UNARY_MINUS).
"\n", ";",
",",
"=",
"?", ":", "CONDITIONAL",
"||",
"&&",
"|",
"^",
"&",
"==", "!=", "===", "!==",
"<", "<=", ">=", ">",
"<<", ">>", ">>>",
"+", "-",
"*", "/", "%",
"!", "~", "UNARY_PLUS", "UNARY_MINUS",
"++", "--",
".",
"[", "]",
"{", "}",
"(", ")",
// Nonterminal tree node type codes.
"SCRIPT", "BLOCK", "LABEL", "FOR_IN", "CALL", "NEW_WITH_ARGS", "INDEX",
"ARRAY_INIT", "OBJECT_INIT", "PROPERTY_INIT", "GETTER", "SETTER",
"GROUP", "LIST", "LET_BLOCK", "ARRAY_COMP", "GENERATOR", "COMP_TAIL",
// Terminals.
"IDENTIFIER", "NUMBER", "STRING", "REGEXP",
// SSA fiction.
"PHI", "INTERVENED",
// Keywords.
"break",
"case", "catch", "const", "continue",
"debugger", "default", "delete", "do",
"else",
"false", "finally", "for", "function",
"if", "in", "instanceof",
"let",
"new", "null",
"return",
"switch",
"this", "throw", "true", "try", "typeof",
"var", "void",
"yield",
"while", "with",
];
// Operator and punctuator mapping from token to tree node type name.
// NB: because the lexer doesn't backtrack, all token prefixes must themselves
// be valid tokens (e.g. !== is acceptable because its prefixes are the valid
// tokens != and !).
var opTypeNames = {
'\n': "NEWLINE",
';': "SEMICOLON",
',': "COMMA",
'?': "HOOK",
':': "COLON",
'||': "OR",
'&&': "AND",
'|': "BITWISE_OR",
'^': "BITWISE_XOR",
'&': "BITWISE_AND",
'===': "STRICT_EQ",
'==': "EQ",
'=': "ASSIGN",
'!==': "STRICT_NE",
'!=': "NE",
'<<': "LSH",
'<=': "LE",
'<': "LT",
'>>>': "URSH",
'>>': "RSH",
'>=': "GE",
'>': "GT",
'++': "INCREMENT",
'--': "DECREMENT",
'+': "PLUS",
'-': "MINUS",
'*': "MUL",
'/': "DIV",
'%': "MOD",
'!': "NOT",
'~': "BITWISE_NOT",
'.': "DOT",
'[': "LEFT_BRACKET",
']': "RIGHT_BRACKET",
'{': "LEFT_CURLY",
'}': "RIGHT_CURLY",
'(': "LEFT_PAREN",
')': "RIGHT_PAREN"
};
// Hash of keyword identifier to tokens index. NB: we must null __proto__ to
// avoid toString, etc. namespace pollution.
var keywords = {__proto__: null};
// Define const END, etc., based on the token names. Also map name to index.
var tokenIds = {};
// Building up a string to be eval'd in different contexts.
var consts = "const ";
for (var i = 0, j = tokens.length; i < j; i++) {
if (i > 0)
consts += ", ";
var t = tokens[i];
var name;
if (/^[a-z]/.test(t)) {
name = t.toUpperCase();
keywords[t] = i;
} else {
name = (/^\W/.test(t) ? opTypeNames[t] : t);
}
consts += name + " = " + i;
tokenIds[name] = i;
tokens[t] = i;
}
consts += ";";
// Map assignment operators to their indexes in the tokens array.
var assignOps = ['|', '^', '&', '<<', '>>', '>>>', '+', '-', '*', '/', '%'];
for (i = 0, j = assignOps.length; i < j; i++) {
t = assignOps[i];
assignOps[t] = tokens[t];
}
function defineGetter(obj, prop, fn, dontDelete, dontEnum) {
Object.defineProperty(obj, prop, { get: fn, configurable: !dontDelete, enumerable: !dontEnum });
}
function defineProperty(obj, prop, val, dontDelete, readOnly, dontEnum) {
Object.defineProperty(obj, prop, { value: val, writable: !readOnly, configurable: !dontDelete, enumerable: !dontEnum });
}
// Returns true if fn is a native function. (Note: SpiderMonkey specific.)
function isNativeCode(fn) {
// Relies on the toString method to identify native code.
return ((typeof fn) === "function") && fn.toString().match(/\[native code\]/);
}
function getPropertyDescriptor(obj, name) {
while (obj) {
if (({}).hasOwnProperty.call(obj, name))
return Object.getOwnPropertyDescriptor(obj, name);
obj = Object.getPrototypeOf(obj);
}
}
function getOwnProperties(obj) {
var map = {};
for (var name in Object.getOwnPropertyNames(obj))
map[name] = Object.getOwnPropertyDescriptor(obj, name);
return map;
}
function makePassthruHandler(obj) {
// Handler copied from
// http://wiki.ecmascript.org/doku.php?id=harmony:proxies&s=proxy%20object#examplea_no-op_forwarding_proxy
return {
getOwnPropertyDescriptor: function(name) {
var desc = Object.getOwnPropertyDescriptor(obj, name);
// a trapping proxy's properties must always be configurable
desc.configurable = true;
return desc;
},
getPropertyDescriptor: function(name) {
var desc = getPropertyDescriptor(obj, name);
// a trapping proxy's properties must always be configurable
desc.configurable = true;
return desc;
},
getOwnPropertyNames: function() {
return Object.getOwnPropertyNames(obj);
},
defineProperty: function(name, desc) {
Object.defineProperty(obj, name, desc);
},
delete: function(name) { return delete obj[name]; },
fix: function() {
if (Object.isFrozen(obj)) {
return getOwnProperties(obj);
}
// As long as obj is not frozen, the proxy won't allow itself to be fixed.
return undefined; // will cause a TypeError to be thrown
},
has: function(name) { return name in obj; },
hasOwn: function(name) { return ({}).hasOwnProperty.call(obj, name); },
get: function(receiver, name) { return obj[name]; },
// bad behavior when set fails in non-strict mode
set: function(receiver, name, val) { obj[name] = val; return true; },
enumerate: function() {
var result = [];
for (name in obj) { result.push(name); };
return result;
},
keys: function() { return Object.keys(obj); }
};
}
return {
tokens: tokens,
opTypeNames: opTypeNames,
keywords: keywords,
tokenIds: tokenIds,
consts: consts,
assignOps: assignOps,
defineGetter: defineGetter,
defineProperty: defineProperty,
isNativeCode: isNativeCode,
makePassthruHandler: makePassthruHandler,
Builder: function() {
throw new Error("no Builder type selected");
}
};
}());
Narcissus.options.builderType = "default";

File diff not shown because it is too large. Load diff

View file

@ -1,470 +0,0 @@
/* vim: set sw=4 ts=4 et tw=78: */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is the Narcissus JavaScript engine.
*
* The Initial Developer of the Original Code is
* Brendan Eich <brendan@mozilla.org>.
* Portions created by the Initial Developer are Copyright (C) 2004
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
/*
* Narcissus - JS implemented in JS.
*
* Lexical scanner.
*/
Narcissus.lexer = (function() {
var definitions = Narcissus.definitions;
// Set constants in the local scope.
eval(definitions.consts);
// Build up a trie of operator tokens.
var opTokens = {};
for (var op in definitions.opTypeNames) {
if (op === '\n' || op === '.')
continue;
var node = opTokens;
for (var i = 0; i < op.length; i++) {
var ch = op[i];
if (!(ch in node))
node[ch] = {};
node = node[ch];
node.op = op;
}
}
/*
* Tokenizer :: (file ptr, path, line number) -> Tokenizer
*/
function Tokenizer(s, f, l) {
this.cursor = 0;
this.source = String(s);
this.tokens = [];
this.tokenIndex = 0;
this.lookahead = 0;
this.scanNewlines = false;
this.filename = f || "";
this.lineno = l || 1;
}
Tokenizer.prototype = {
get done() {
// We need to set scanOperand to true here because the first thing
// might be a regexp.
return this.peek(true) === END;
},
get token() {
return this.tokens[this.tokenIndex];
},
match: function (tt, scanOperand) {
return this.get(scanOperand) === tt || this.unget();
},
mustMatch: function (tt) {
if (!this.match(tt)) {
throw this.newSyntaxError("Missing " +
definitions.tokens[tt].toLowerCase());
}
return this.token;
},
peek: function (scanOperand) {
var tt, next;
if (this.lookahead) {
next = this.tokens[(this.tokenIndex + this.lookahead) & 3];
tt = (this.scanNewlines && next.lineno !== this.lineno)
? NEWLINE
: next.type;
} else {
tt = this.get(scanOperand);
this.unget();
}
return tt;
},
peekOnSameLine: function (scanOperand) {
this.scanNewlines = true;
var tt = this.peek(scanOperand);
this.scanNewlines = false;
return tt;
},
// Eats comments and whitespace.
skip: function () {
var input = this.source;
for (;;) {
var ch = input[this.cursor++];
var next = input[this.cursor];
if (ch === '\n' && !this.scanNewlines) {
this.lineno++;
} else if (ch === '/' && next === '*') {
this.cursor++;
for (;;) {
ch = input[this.cursor++];
if (ch === undefined)
throw this.newSyntaxError("Unterminated comment");
if (ch === '*') {
next = input[this.cursor];
if (next === '/') {
this.cursor++;
break;
}
} else if (ch === '\n') {
this.lineno++;
}
}
} else if (ch === '/' && next === '/') {
this.cursor++;
for (;;) {
ch = input[this.cursor++];
if (ch === undefined)
return;
if (ch === '\n') {
this.lineno++;
break;
}
}
} else if (ch !== ' ' && ch !== '\t') {
this.cursor--;
return;
}
}
},
// Lexes the exponential part of a number, if present. Returns true iff an
// exponential part was found.
lexExponent: function() {
var input = this.source;
var next = input[this.cursor];
if (next === 'e' || next === 'E') {
this.cursor++;
ch = input[this.cursor++];
if (ch === '+' || ch === '-')
ch = input[this.cursor++];
if (ch < '0' || ch > '9')
throw this.newSyntaxError("Missing exponent");
do {
ch = input[this.cursor++];
} while (ch >= '0' && ch <= '9');
this.cursor--;
return true;
}
return false;
},
lexZeroNumber: function (ch) {
var token = this.token, input = this.source;
token.type = NUMBER;
ch = input[this.cursor++];
if (ch === '.') {
do {
ch = input[this.cursor++];
} while (ch >= '0' && ch <= '9');
this.cursor--;
this.lexExponent();
token.value = parseFloat(token.start, this.cursor);
} else if (ch === 'x' || ch === 'X') {
do {
ch = input[this.cursor++];
} while ((ch >= '0' && ch <= '9') || (ch >= 'a' && ch <= 'f') ||
(ch >= 'A' && ch <= 'F'));
this.cursor--;
token.value = parseInt(input.substring(token.start, this.cursor));
} else if (ch >= '0' && ch <= '7') {
do {
ch = input[this.cursor++];
} while (ch >= '0' && ch <= '7');
this.cursor--;
token.value = parseInt(input.substring(token.start, this.cursor));
} else {
this.cursor--;
this.lexExponent(); // 0E1, &c.
token.value = 0;
}
},
lexNumber: function (ch) {
var token = this.token, input = this.source;
token.type = NUMBER;
var floating = false;
do {
ch = input[this.cursor++];
if (ch === '.' && !floating) {
floating = true;
ch = input[this.cursor++];
}
} while (ch >= '0' && ch <= '9');
this.cursor--;
var exponent = this.lexExponent();
floating = floating || exponent;
var str = input.substring(token.start, this.cursor);
token.value = floating ? parseFloat(str) : parseInt(str);
},
lexDot: function (ch) {
var token = this.token, input = this.source;
var next = input[this.cursor];
if (next >= '0' && next <= '9') {
do {
ch = input[this.cursor++];
} while (ch >= '0' && ch <= '9');
this.cursor--;
this.lexExponent();
token.type = NUMBER;
token.value = parseFloat(token.start, this.cursor);
} else {
token.type = DOT;
token.assignOp = null;
token.value = '.';
}
},
lexString: function (ch) {
var token = this.token, input = this.source;
token.type = STRING;
var hasEscapes = false;
var delim = ch;
ch = input[this.cursor++];
while (ch !== delim) {
if (ch === '\\') {
hasEscapes = true;
this.cursor++;
}
ch = input[this.cursor++];
}
token.value = (hasEscapes)
? eval(input.substring(token.start, this.cursor))
: input.substring(token.start + 1, this.cursor - 1);
},
lexRegExp: function (ch) {
var token = this.token, input = this.source;
token.type = REGEXP;
do {
ch = input[this.cursor++];
if (ch === '\\') {
this.cursor++;
} else if (ch === '[') {
do {
if (ch === undefined)
throw this.newSyntaxError("Unterminated character class");
if (ch === '\\')
this.cursor++;
ch = input[this.cursor++];
} while (ch !== ']');
} else if (ch === undefined) {
throw this.newSyntaxError("Unterminated regex");
}
} while (ch !== '/');
do {
ch = input[this.cursor++];
} while (ch >= 'a' && ch <= 'z');
this.cursor--;
token.value = eval(input.substring(token.start, this.cursor));
},
lexOp: function (ch) {
var token = this.token, input = this.source;
// A bit ugly, but it seems wasteful to write a trie lookup routine for
// only 3 characters...
var node = opTokens[ch];
var next = input[this.cursor];
if (next in node) {
node = node[next];
this.cursor++;
next = input[this.cursor];
if (next in node) {
node = node[next];
this.cursor++;
next = input[this.cursor];
}
}
var op = node.op;
if (definitions.assignOps[op] && input[this.cursor] === '=') {
this.cursor++;
token.type = ASSIGN;
token.assignOp = definitions.tokenIds[definitions.opTypeNames[op]];
op += '=';
} else {
token.type = definitions.tokenIds[definitions.opTypeNames[op]];
token.assignOp = null;
}
token.value = op;
},
// FIXME: Unicode escape sequences
// FIXME: Unicode identifiers
lexIdent: function (ch) {
var token = this.token, input = this.source;
do {
ch = input[this.cursor++];
} while ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') ||
(ch >= '0' && ch <= '9') || ch === '$' || ch === '_');
this.cursor--; // Put the non-word character back.
var id = input.substring(token.start, this.cursor);
token.type = definitions.keywords[id] || IDENTIFIER;
token.value = id;
},
/*
* Tokenizer.get :: void -> token type
*
* Consumes input *only* if there is no lookahead.
* Dispatch to the appropriate lexing function depending on the input.
*/
get: function (scanOperand) {
var token;
while (this.lookahead) {
--this.lookahead;
this.tokenIndex = (this.tokenIndex + 1) & 3;
token = this.tokens[this.tokenIndex];
if (token.type !== NEWLINE || this.scanNewlines)
return token.type;
}
this.skip();
this.tokenIndex = (this.tokenIndex + 1) & 3;
token = this.tokens[this.tokenIndex];
if (!token)
this.tokens[this.tokenIndex] = token = {};
var input = this.source;
if (this.cursor === input.length)
return token.type = END;
token.start = this.cursor;
token.lineno = this.lineno;
var ch = input[this.cursor++];
if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') ||
ch === '$' || ch === '_') {
this.lexIdent(ch);
} else if (scanOperand && ch === '/') {
this.lexRegExp(ch);
} else if (ch in opTokens) {
this.lexOp(ch);
} else if (ch === '.') {
this.lexDot(ch);
} else if (ch >= '1' && ch <= '9') {
this.lexNumber(ch);
} else if (ch === '0') {
this.lexZeroNumber(ch);
} else if (ch === '"' || ch === "'") {
this.lexString(ch);
} else if (this.scanNewlines && ch === '\n') {
token.type = NEWLINE;
token.value = '\n';
this.lineno++;
} else {
throw this.newSyntaxError("Illegal token");
}
token.end = this.cursor;
return token.type;
},
/*
* Tokenizer.unget :: void -> undefined
*
* Match depends on unget returning undefined.
*/
unget: function () {
if (++this.lookahead === 4) throw "PANIC: too much lookahead!";
this.tokenIndex = (this.tokenIndex - 1) & 3;
},
newSyntaxError: function (m) {
var e = new SyntaxError(m, this.filename, this.lineno);
e.source = this.source;
e.cursor = this.cursor;
return e;
},
save: function () {
return {
cursor: this.cursor,
tokenIndex: this.tokenIndex,
tokens: this.tokens.slice(),
lookahead: this.lookahead,
scanNewlines: this.scanNewlines,
lineno: this.lineno
};
},
rewind: function(point) {
this.cursor = point.cursor;
this.tokenIndex = point.tokenIndex;
this.tokens = point.tokens.slice();
this.lookahead = point.lookahead;
this.scanNewline = point.scanNewline;
this.lineno = point.lineno;
}
};
return { Tokenizer: Tokenizer };
}());
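The four-slot token ring and the "& 3" index masking used throughout Tokenizer.get/unget/peek above can be shown in isolation. A minimal C++ sketch, illustrative only: plain ints stand in for token records, and the real Narcissus code additionally special-cases newline tokens.

#include <cassert>
#include <cstdio>

// Sketch of the 4-slot token ring: get() advances tokenIndex with "& 3",
// unget() steps it back, and more than three outstanding ungets would start
// overwriting live tokens (the "PANIC: too much lookahead!" case above).
struct TokenRing {
    int tokens[4] = { 0, 0, 0, 0 };
    int tokenIndex = 0;
    int lookahead = 0;
    int nextValue = 100;     // pretend lexer output

    int get() {
        if (lookahead) {                      // re-deliver an ungotten token
            --lookahead;
            tokenIndex = (tokenIndex + 1) & 3;
            return tokens[tokenIndex];
        }
        tokenIndex = (tokenIndex + 1) & 3;    // "lex" a fresh token into the ring
        tokens[tokenIndex] = nextValue++;
        return tokens[tokenIndex];
    }

    void unget() {
        assert(++lookahead < 4 && "too much lookahead");
        tokenIndex = (tokenIndex - 1) & 3;
    }

    int peek() {                              // == get() followed by unget()
        int t = get();
        unget();
        return t;
    }
};

int main() {
    TokenRing ring;
    printf("%d %d\n", ring.peek(), ring.get());   // both print 100
    printf("%d\n", ring.get());                   // 101
    return 0;
}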

File diff not shown because it is too large. Load diff

File diff not shown because it is too large. Load diff

View file

@ -416,7 +416,8 @@ void* ARMAssembler::executableCopy(ExecutablePool* allocator)
bkpt(0);
void * data = m_buffer.executableCopy(allocator);
fixUpOffsets(data);
if (data)
fixUpOffsets(data);
return data;
}
@ -426,6 +427,9 @@ void* ARMAssembler::executableCopy(ExecutablePool* allocator)
// have been flushed.
void* ARMAssembler::executableCopy(void * buffer)
{
if (m_buffer.oom())
return NULL;
ASSERT(m_buffer.sizeOfConstantPool() == 0);
memcpy(buffer, m_buffer.data(), m_buffer.size());

View file

@ -146,6 +146,7 @@ namespace JSC {
typedef SegmentedVector<int, 64> Jumps;
unsigned char *buffer() const { return m_buffer.buffer(); }
bool oom() const { return m_buffer.oom(); }
// ARM conditional constants
typedef enum {

View file

@ -1905,6 +1905,7 @@ private:
bool isAligned(int alignment) const { return m_buffer.isAligned(alignment); }
void* data() const { return m_buffer.data(); }
void* executableCopy(ExecutablePool* allocator) { return m_buffer.executableCopy(allocator); }
bool oom() const { return m_buffer.oom(); }
private:
AssemblerBuffer m_buffer;

View file

@ -446,6 +446,11 @@ public:
return m_assembler.buffer();
}
bool oom()
{
return m_assembler.oom();
}
void* executableCopy(void* buffer)
{
return m_assembler.executableCopy(buffer);

View file

@ -48,6 +48,7 @@ namespace JSC {
: m_buffer(m_inlineBuffer)
, m_capacity(inlineCapacity)
, m_size(0)
, m_oom(false)
{
}
@ -127,8 +128,20 @@ namespace JSC {
return m_size;
}
bool oom() const
{
return m_oom;
}
/*
* The user must check for a NULL return value, which means
* no code was generated, or there was an OOM.
*/
void* executableCopy(ExecutablePool* allocator)
{
if (m_oom)
return 0;
if (!m_size)
return 0;
@ -143,6 +156,7 @@ namespace JSC {
}
unsigned char *buffer() const {
ASSERT(!m_oom);
return reinterpret_cast<unsigned char *>(m_buffer);
}
@ -152,25 +166,59 @@ namespace JSC {
if (m_size > m_capacity - size)
grow(size);
// If we OOM and size > inlineCapacity, this would crash.
if (m_oom)
return;
memcpy(m_buffer + m_size, data, size);
m_size += size;
}
/*
* OOM handling: This class can OOM in the grow() method trying to
* allocate a new buffer. In response to an OOM, we need to avoid
* crashing and report the error. We also want to make it so that
* users of this class need to check for OOM only at certain points
* and not after every operation.
*
* Our strategy for handling an OOM is to set m_oom, and then set
* m_size to 0, preserving the current buffer. This way, the user
* can continue assembling into the buffer, deferring OOM checking
* until the user wants to read code out of the buffer.
*
* See also the |executableCopy| and |buffer| methods.
*/
void grow(int extraCapacity = 0)
{
m_capacity += m_capacity / 2 + extraCapacity;
int newCapacity = m_capacity + m_capacity / 2 + extraCapacity;
char* newBuffer;
if (m_buffer == m_inlineBuffer) {
char* newBuffer = static_cast<char*>(malloc(m_capacity));
m_buffer = static_cast<char*>(memcpy(newBuffer, m_buffer, m_size));
} else
m_buffer = static_cast<char*>(realloc(m_buffer, m_capacity));
newBuffer = static_cast<char*>(malloc(newCapacity));
if (!newBuffer) {
m_size = 0;
m_oom = true;
return;
}
memcpy(newBuffer, m_buffer, m_size);
} else {
newBuffer = static_cast<char*>(realloc(m_buffer, newCapacity));
if (!newBuffer) {
m_size = 0;
m_oom = true;
return;
}
}
m_buffer = newBuffer;
m_capacity = newCapacity;
}
char m_inlineBuffer[inlineCapacity];
char* m_buffer;
int m_capacity;
int m_size;
bool m_oom;
};
} // namespace JSC
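The deferred-OOM strategy described in the comment above can be illustrated outside the engine. The following is a minimal, self-contained sketch (not the real JSC::AssemblerBuffer; names, the inline buffer, and the growth policy are simplified):

#include <cstdio>
#include <cstdlib>
#include <cstring>

// Minimal sketch of the deferred-OOM strategy: record the failure in a flag,
// keep accepting (and dropping) writes, and check only once at copy-out time.
class GrowableBuffer {
  public:
    GrowableBuffer() : m_buffer(nullptr), m_capacity(0), m_size(0), m_oom(false) {}
    ~GrowableBuffer() { free(m_buffer); }

    void append(const void* data, size_t size) {
        if (m_size + size > m_capacity)
            grow(size);
        if (m_oom)                 // after an OOM, silently drop further writes
            return;
        memcpy(m_buffer + m_size, data, size);
        m_size += size;
    }

    bool oom() const { return m_oom; }

    // Analogous to executableCopy(): returns NULL on OOM (or an empty buffer),
    // so callers need exactly one check at the point they read code out.
    void* copyOut() const {
        if (m_oom || !m_size)
            return nullptr;
        void* out = malloc(m_size);
        if (out)
            memcpy(out, m_buffer, m_size);
        return out;
    }

  private:
    void grow(size_t extra) {
        size_t newCapacity = m_capacity + m_capacity / 2 + extra;
        char* newBuffer = static_cast<char*>(realloc(m_buffer, newCapacity));
        if (!newBuffer) {
            m_size = 0;            // keep later appends in bounds
            m_oom = true;          // remember the failure for the final check
            return;
        }
        m_buffer = newBuffer;
        m_capacity = newCapacity;
    }

    char* m_buffer;
    size_t m_capacity;
    size_t m_size;
    bool m_oom;
};

int main() {
    GrowableBuffer buf;
    for (int i = 0; i < 1000; i++)
        buf.append("\x90", 1);            // emit many "instructions"
    void* code = buf.copyOut();           // the single OOM check happens here
    if (code)
        printf("copied 1000 bytes\n");
    else
        printf("OOM while assembling\n");
    free(code);
    return 0;
}

Callers of the real executableCopy follow the same contract: the ARMAssembler change earlier in this diff now returns NULL when m_buffer.oom() is set, and the method JIT checks masm.oom() once via RETURN_IF_OOM before linking (see the Compiler.cpp hunks near the end of this commit).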

View file

@ -363,6 +363,7 @@ public:
size_t size() const { return m_formatter.size(); }
unsigned char *buffer() const { return m_formatter.buffer(); }
bool oom() const { return m_formatter.oom(); }
// Stack operations:
@ -2222,12 +2223,13 @@ public:
void* executableCopy(ExecutablePool* allocator)
{
void* copy = m_formatter.executableCopy(allocator);
ASSERT(copy);
return copy;
}
void* executableCopy(void* buffer)
{
if (m_formatter.oom())
return NULL;
return memcpy(buffer, m_formatter.buffer(), size());
}
@ -2541,6 +2543,7 @@ private:
size_t size() const { return m_buffer.size(); }
unsigned char *buffer() const { return m_buffer.buffer(); }
bool oom() const { return m_buffer.oom(); }
bool isAligned(int alignment) const { return m_buffer.isAligned(alignment); }
void* data() const { return m_buffer.data(); }
void* executableCopy(ExecutablePool* allocator) { return m_buffer.executableCopy(allocator); }

View file

@ -55,7 +55,16 @@ extern "C" __declspec(dllimport) void CacheRangeFlush(LPVOID pAddr, DWORD dwLeng
#endif
#define JIT_ALLOCATOR_PAGE_SIZE (ExecutableAllocator::pageSize)
#define JIT_ALLOCATOR_LARGE_ALLOC_SIZE (ExecutableAllocator::pageSize * 4)
#if WTF_PLATFORM_WIN_OS || WTF_PLATFORM_WINCE
/*
* In practice, VirtualAlloc allocates in 64K chunks. (Technically, it
* allocates in page chunks, but the starting address is always a multiple
* of 64K, so each allocation uses up 64K of address space.)
*/
# define JIT_ALLOCATOR_LARGE_ALLOC_SIZE (ExecutableAllocator::pageSize * 16)
#else
# define JIT_ALLOCATOR_LARGE_ALLOC_SIZE (ExecutableAllocator::pageSize * 4)
#endif
#if ENABLE_ASSEMBLER_WX_EXCLUSIVE
#define PROTECTION_FLAGS_RW (PROT_READ | PROT_WRITE)
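A rough sanity check of the new Windows constant, assuming the common 4 KB page size (an assumption made only for illustration; the real code reads ExecutableAllocator::pageSize at runtime):

#include <cstddef>
#include <cstdio>

int main() {
    // Illustrative values, not the engine's.
    const size_t pageSize       = 4096;            // typical x86 page size
    const size_t vaGranularity  = 64 * 1024;       // VirtualAlloc allocation granularity
    const size_t largeAllocWin  = pageSize * 16;   // new Windows pool size
    const size_t largeAllocElse = pageSize * 4;    // other platforms

    // Each VirtualAlloc consumes a whole 64 KB of address space, so sizing the
    // pool to exactly one granule wastes none of it.
    printf("windows pool: %zu bytes (%zu granule)\n", largeAllocWin, largeAllocWin / vaGranularity);
    printf("elsewhere:    %zu bytes\n", largeAllocElse);
    return 0;
}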

View file

@ -2046,7 +2046,7 @@ JSContext::JSContext(JSRuntime *rt)
: runtime(rt),
compartment(rt->defaultCompartment),
regs(NULL),
busyArrays(this)
busyArrays(thisInInitializer())
{}
void

View file

@ -1125,8 +1125,7 @@ struct JSThreadData {
/*
* Flag indicating that we are waiving any soft limits on the GC heap
* because we want allocations to be infallible (except when we hit
* a hard quota).
* because we want allocations to be infallible (except when we hit OOM).
*/
bool waiveGCQuota;
@ -1355,22 +1354,21 @@ struct JSRuntime {
js::GCLocks gcLocksHash;
jsrefcount gcKeepAtoms;
size_t gcBytes;
size_t gcTriggerBytes;
size_t gcLastBytes;
size_t gcMaxBytes;
size_t gcMaxMallocBytes;
size_t gcNewArenaTriggerBytes;
uint32 gcEmptyArenaPoolLifespan;
uint32 gcNumber;
js::GCMarker *gcMarkingTracer;
uint32 gcTriggerFactor;
size_t gcTriggerBytes;
volatile JSBool gcIsNeeded;
/*
* NB: do not pack another flag here by claiming gcPadding unless the new
* flag is written only by the GC thread. Atomic updates to packed bytes
* are not guaranteed, so stores issued by one thread may be lost due to
* unsynchronized read-modify-write cycles on other threads.
* We can pack these flags as only the GC thread writes to them. Atomic
* updates to packed bytes are not guaranteed, so stores issued by one
* thread may be lost due to unsynchronized read-modify-write cycles on
* other threads.
*/
bool gcPoke;
bool gcMarkAndSweep;
@ -2385,6 +2383,9 @@ private:
* a boolean flag to minimize the amount of code in its inlined callers.
*/
JS_FRIEND_API(void) checkMallocGCPressure(void *p);
/* To silence MSVC warning about using 'this' in a member initializer. */
JSContext *thisInInitializer() { return this; }
};
#ifdef JS_THREADSAFE
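The new thisInInitializer() helper exists only to keep MSVC quiet when `this` is passed in a constructor's member-initializer list (see the busyArrays(thisInInitializer()) change earlier in this diff). A standalone sketch of the pattern, with invented class names:

// MSVC warns when `this` appears directly in a member initializer list,
// because the object is not fully constructed yet; routing it through a
// member function hides the direct use while behaving identically.
struct Registry {
    void* owner;
    explicit Registry(void* o) : owner(o) {}
};

class Context {
  public:
    Context()
      : registry(thisInInitializer())   // instead of `registry(this)`
    {}

  private:
    // Returns `this`; exists only to keep the initializer list warning-free.
    Context* thisInInitializer() { return this; }

    Registry registry;
};

int main() { Context cx; return 0; }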

View file

@ -49,6 +49,11 @@
#include "jsclist.h"
#include "jsxml.h"
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4251) /* Silence warning about JS_FRIEND_API and data members. */
#endif
struct JS_FRIEND_API(JSCompartment) {
JSRuntime *rt;
JSPrincipals *principals;
@ -99,6 +104,10 @@ struct JS_FRIEND_API(JSCompartment) {
bool arenaListsAreEmpty();
};
#ifdef _MSC_VER
#pragma warning(pop)
#endif
namespace js {
class PreserveCompartment {
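The push/disable/pop added around JSCompartment keeps the C4251 suppression local to this one declaration. A reduced standalone illustration (the export macro and member types are made up):

// Scoped suppression of MSVC warning C4251 ("member needs dll-interface"),
// mirroring the pattern added above.
#ifdef _MSC_VER
# define MY_EXPORT __declspec(dllexport)   // stand-in for JS_FRIEND_API(...)
#else
# define MY_EXPORT
#endif

struct Stats { int hits; };                // a type that is not itself exported

#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable:4251)             // silence it only for this declaration
#endif

struct MY_EXPORT Compartment {
    Stats stats;                           // member of non-exported type in an exported class
};

#ifdef _MSC_VER
# pragma warning(pop)                      // restore the caller's warning state
#endif

int main() { Compartment c; c.stats.hits = 0; return c.stats.hits; }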

View file

@ -2315,6 +2315,8 @@ ethogram_construct(JSContext *cx, uintN argc, jsval *vp)
EthogramEventBuffer *p;
p = (EthogramEventBuffer *) JS_malloc(cx, sizeof(EthogramEventBuffer));
if (!p)
return JS_FALSE;
p->mReadPos = p->mWritePos = 0;
p->mScripts = NULL;

View file

@ -328,201 +328,202 @@ js_dtobasestr(DtoaState *state, int base, double dinput)
dval(d) = dinput;
buffer = (char*) js_malloc(DTOBASESTR_BUFFER_SIZE);
if (buffer) {
p = buffer;
if (dval(d) < 0.0
if (!buffer)
return NULL;
p = buffer;
if (dval(d) < 0.0
#if defined(XP_WIN) || defined(XP_OS2)
&& !((word0(d) & Exp_mask) == Exp_mask && ((word0(d) & Frac_mask) || word1(d))) /* Visual C++ doesn't know how to compare against NaN */
&& !((word0(d) & Exp_mask) == Exp_mask && ((word0(d) & Frac_mask) || word1(d))) /* Visual C++ doesn't know how to compare against NaN */
#endif
) {
*p++ = '-';
dval(d) = -dval(d);
}
) {
*p++ = '-';
dval(d) = -dval(d);
}
/* Check for Infinity and NaN */
if ((word0(d) & Exp_mask) == Exp_mask) {
strcpy(p, !word1(d) && !(word0(d) & Frac_mask) ? "Infinity" : "NaN");
return buffer;
}
/* Check for Infinity and NaN */
if ((word0(d) & Exp_mask) == Exp_mask) {
strcpy(p, !word1(d) && !(word0(d) & Frac_mask) ? "Infinity" : "NaN");
return buffer;
}
/* Output the integer part of d with the digits in reverse order. */
pInt = p;
dval(di) = floor(dval(d));
if (dval(di) <= 4294967295.0) {
uint32 n = (uint32)dval(di);
if (n)
do {
uint32 m = n / base;
digit = n - m*base;
n = m;
JS_ASSERT(digit < (uint32)base);
*p++ = BASEDIGIT(digit);
} while (n);
else *p++ = '0';
} else {
int e;
int bits; /* Number of significant bits in di; not used. */
Bigint *b = d2b(PASS_STATE di, &e, &bits);
if (!b)
goto nomem1;
b = lshift(PASS_STATE b, e);
if (!b) {
nomem1:
Bfree(PASS_STATE b);
js_free(buffer);
return NULL;
}
/* Output the integer part of d with the digits in reverse order. */
pInt = p;
dval(di) = floor(dval(d));
if (dval(di) <= 4294967295.0) {
uint32 n = (uint32)dval(di);
if (n)
do {
digit = divrem(b, base);
uint32 m = n / base;
digit = n - m*base;
n = m;
JS_ASSERT(digit < (uint32)base);
*p++ = BASEDIGIT(digit);
} while (b->wds);
} while (n);
else *p++ = '0';
} else {
int e;
int bits; /* Number of significant bits in di; not used. */
Bigint *b = d2b(PASS_STATE di, &e, &bits);
if (!b)
goto nomem1;
b = lshift(PASS_STATE b, e);
if (!b) {
nomem1:
Bfree(PASS_STATE b);
js_free(buffer);
return NULL;
}
/* Reverse the digits of the integer part of d. */
q = p-1;
while (q > pInt) {
char ch = *pInt;
*pInt++ = *q;
*q-- = ch;
}
do {
digit = divrem(b, base);
JS_ASSERT(digit < (uint32)base);
*p++ = BASEDIGIT(digit);
} while (b->wds);
Bfree(PASS_STATE b);
}
/* Reverse the digits of the integer part of d. */
q = p-1;
while (q > pInt) {
char ch = *pInt;
*pInt++ = *q;
*q-- = ch;
}
dval(df) = dval(d) - dval(di);
if (dval(df) != 0.0) {
/* We have a fraction. */
int e, bbits;
int32 s2, done;
Bigint *b, *s, *mlo, *mhi;
dval(df) = dval(d) - dval(di);
if (dval(df) != 0.0) {
/* We have a fraction. */
int e, bbits;
int32 s2, done;
Bigint *b, *s, *mlo, *mhi;
b = s = mlo = mhi = NULL;
b = s = mlo = mhi = NULL;
*p++ = '.';
b = d2b(PASS_STATE df, &e, &bbits);
if (!b) {
nomem2:
Bfree(PASS_STATE b);
Bfree(PASS_STATE s);
if (mlo != mhi)
Bfree(PASS_STATE mlo);
Bfree(PASS_STATE mhi);
js_free(buffer);
return NULL;
}
JS_ASSERT(e < 0);
/* At this point df = b * 2^e. e must be less than zero because 0 < df < 1. */
s2 = -(int32)(word0(d) >> Exp_shift1 & Exp_mask>>Exp_shift1);
#ifndef Sudden_Underflow
if (!s2)
s2 = -1;
#endif
s2 += Bias + P;
/* 1/2^s2 = (nextDouble(d) - d)/2 */
JS_ASSERT(-s2 < e);
mlo = i2b(PASS_STATE 1);
if (!mlo)
goto nomem2;
mhi = mlo;
if (!word1(d) && !(word0(d) & Bndry_mask)
#ifndef Sudden_Underflow
&& word0(d) & (Exp_mask & Exp_mask << 1)
#endif
) {
/* The special case. Here we want to be within a quarter of the last input
significant digit instead of one half of it when the output string's value is less than d. */
s2 += Log2P;
mhi = i2b(PASS_STATE 1<<Log2P);
if (!mhi)
goto nomem2;
}
b = lshift(PASS_STATE b, e + s2);
if (!b)
goto nomem2;
s = i2b(PASS_STATE 1);
if (!s)
goto nomem2;
s = lshift(PASS_STATE s, s2);
if (!s)
goto nomem2;
/* At this point we have the following:
* s = 2^s2;
* 1 > df = b/2^s2 > 0;
* (d - prevDouble(d))/2 = mlo/2^s2;
* (nextDouble(d) - d)/2 = mhi/2^s2. */
done = JS_FALSE;
do {
int32 j, j1;
Bigint *delta;
b = multadd(PASS_STATE b, base, 0);
if (!b)
goto nomem2;
digit = quorem2(b, s2);
if (mlo == mhi) {
mlo = mhi = multadd(PASS_STATE mlo, base, 0);
if (!mhi)
goto nomem2;
}
else {
mlo = multadd(PASS_STATE mlo, base, 0);
if (!mlo)
goto nomem2;
mhi = multadd(PASS_STATE mhi, base, 0);
if (!mhi)
goto nomem2;
}
/* Do we yet have the shortest string that will round to d? */
j = cmp(b, mlo);
/* j is b/2^s2 compared with mlo/2^s2. */
delta = diff(PASS_STATE s, mhi);
if (!delta)
goto nomem2;
j1 = delta->sign ? 1 : cmp(b, delta);
Bfree(PASS_STATE delta);
/* j1 is b/2^s2 compared with 1 - mhi/2^s2. */
#ifndef ROUND_BIASED
if (j1 == 0 && !(word1(d) & 1)) {
if (j > 0)
digit++;
done = JS_TRUE;
} else
#endif
if (j < 0 || (j == 0
#ifndef ROUND_BIASED
&& !(word1(d) & 1)
#endif
)) {
if (j1 > 0) {
/* Either dig or dig+1 would work here as the least significant digit.
Use whichever would produce an output value closer to d. */
b = lshift(PASS_STATE b, 1);
if (!b)
goto nomem2;
j1 = cmp(b, s);
if (j1 > 0) /* The even test (|| (j1 == 0 && (digit & 1))) is not here because it messes up odd base output
* such as 3.5 in base 3. */
digit++;
}
done = JS_TRUE;
} else if (j1 > 0) {
digit++;
done = JS_TRUE;
}
JS_ASSERT(digit < (uint32)base);
*p++ = BASEDIGIT(digit);
} while (!done);
*p++ = '.';
b = d2b(PASS_STATE df, &e, &bbits);
if (!b) {
nomem2:
Bfree(PASS_STATE b);
Bfree(PASS_STATE s);
if (mlo != mhi)
Bfree(PASS_STATE mlo);
Bfree(PASS_STATE mhi);
js_free(buffer);
return NULL;
}
JS_ASSERT(p < buffer + DTOBASESTR_BUFFER_SIZE);
*p = '\0';
JS_ASSERT(e < 0);
/* At this point df = b * 2^e. e must be less than zero because 0 < df < 1. */
s2 = -(int32)(word0(d) >> Exp_shift1 & Exp_mask>>Exp_shift1);
#ifndef Sudden_Underflow
if (!s2)
s2 = -1;
#endif
s2 += Bias + P;
/* 1/2^s2 = (nextDouble(d) - d)/2 */
JS_ASSERT(-s2 < e);
mlo = i2b(PASS_STATE 1);
if (!mlo)
goto nomem2;
mhi = mlo;
if (!word1(d) && !(word0(d) & Bndry_mask)
#ifndef Sudden_Underflow
&& word0(d) & (Exp_mask & Exp_mask << 1)
#endif
) {
/* The special case. Here we want to be within a quarter of the last input
significant digit instead of one half of it when the output string's value is less than d. */
s2 += Log2P;
mhi = i2b(PASS_STATE 1<<Log2P);
if (!mhi)
goto nomem2;
}
b = lshift(PASS_STATE b, e + s2);
if (!b)
goto nomem2;
s = i2b(PASS_STATE 1);
if (!s)
goto nomem2;
s = lshift(PASS_STATE s, s2);
if (!s)
goto nomem2;
/* At this point we have the following:
* s = 2^s2;
* 1 > df = b/2^s2 > 0;
* (d - prevDouble(d))/2 = mlo/2^s2;
* (nextDouble(d) - d)/2 = mhi/2^s2. */
done = JS_FALSE;
do {
int32 j, j1;
Bigint *delta;
b = multadd(PASS_STATE b, base, 0);
if (!b)
goto nomem2;
digit = quorem2(b, s2);
if (mlo == mhi) {
mlo = mhi = multadd(PASS_STATE mlo, base, 0);
if (!mhi)
goto nomem2;
}
else {
mlo = multadd(PASS_STATE mlo, base, 0);
if (!mlo)
goto nomem2;
mhi = multadd(PASS_STATE mhi, base, 0);
if (!mhi)
goto nomem2;
}
/* Do we yet have the shortest string that will round to d? */
j = cmp(b, mlo);
/* j is b/2^s2 compared with mlo/2^s2. */
delta = diff(PASS_STATE s, mhi);
if (!delta)
goto nomem2;
j1 = delta->sign ? 1 : cmp(b, delta);
Bfree(PASS_STATE delta);
/* j1 is b/2^s2 compared with 1 - mhi/2^s2. */
#ifndef ROUND_BIASED
if (j1 == 0 && !(word1(d) & 1)) {
if (j > 0)
digit++;
done = JS_TRUE;
} else
#endif
if (j < 0 || (j == 0
#ifndef ROUND_BIASED
&& !(word1(d) & 1)
#endif
)) {
if (j1 > 0) {
/* Either dig or dig+1 would work here as the least significant digit.
Use whichever would produce an output value closer to d. */
b = lshift(PASS_STATE b, 1);
if (!b)
goto nomem2;
j1 = cmp(b, s);
if (j1 > 0) /* The even test (|| (j1 == 0 && (digit & 1))) is not here because it messes up odd base output
* such as 3.5 in base 3. */
digit++;
}
done = JS_TRUE;
} else if (j1 > 0) {
digit++;
done = JS_TRUE;
}
JS_ASSERT(digit < (uint32)base);
*p++ = BASEDIGIT(digit);
} while (!done);
Bfree(PASS_STATE b);
Bfree(PASS_STATE s);
if (mlo != mhi)
Bfree(PASS_STATE mlo);
Bfree(PASS_STATE mhi);
}
JS_ASSERT(p < buffer + DTOBASESTR_BUFFER_SIZE);
*p = '\0';
return buffer;
}

View file

@ -319,13 +319,16 @@ template <typename T>
Arena<T> *
Chunk::allocateArena(JSCompartment *comp, unsigned thingKind)
{
JSRuntime *rt = info.runtime;
JS_ASSERT(hasAvailableArenas());
Arena<T> *arena = info.emptyArenaLists.getNext<T>(comp, thingKind);
JS_ASSERT(arena);
JS_ASSERT(arena->header()->isUsed);
--info.numFree;
JSRuntime *rt = info.runtime;
rt->gcBytes += sizeof(Arena<T>);
if (rt->gcBytes >= rt->gcTriggerBytes)
TriggerGC(rt);
METER(rt->gcStats.nallarenas++);
return arena;
}
@ -413,23 +416,9 @@ ReleaseGCChunk(JSRuntime *rt, Chunk *p)
}
static Chunk *
PickChunk(JSContext *cx)
PickChunk(JSRuntime *rt)
{
JSRuntime *rt = cx->runtime;
Chunk *chunk;
if (!JS_THREAD_DATA(cx)->waiveGCQuota &&
(rt->gcBytes >= rt->gcMaxBytes ||
rt->gcBytes > GC_HEAP_GROWTH_FACTOR * rt->gcNewArenaTriggerBytes)) {
/*
* FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so
* just pretend we are out of memory which will throw us off trace and
* we will re-try this code path from the interpreter.
*/
if (!JS_ON_TRACE(cx))
return NULL;
TriggerGC(cx->runtime);
}
for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
if (r.front()->hasAvailableArenas())
return r.front();
@ -477,22 +466,17 @@ static Arena<T> *
AllocateArena(JSContext *cx, unsigned thingKind)
{
JSRuntime *rt = cx->runtime;
Chunk *chunk;
Arena<T> *arena;
{
AutoLockGC lock(rt);
if (cx->compartment->chunk && cx->compartment->chunk->hasAvailableArenas()) {
chunk = cx->compartment->chunk;
} else {
if (!(chunk = PickChunk(cx))) {
return NULL;
} else {
cx->compartment->chunk = chunk;
}
AutoLockGC lock(rt);
Chunk *chunk = cx->compartment->chunk;
if (!chunk || !chunk->hasAvailableArenas()) {
chunk = PickChunk(rt);
if (!chunk) {
TriggerGC(rt);
return NULL;
}
arena = chunk->allocateArena<T>(cx->compartment, thingKind);
cx->compartment->chunk = chunk;
}
return arena;
return chunk->allocateArena<T>(cx->compartment, thingKind);
}
JS_FRIEND_API(bool)
@ -551,18 +535,13 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes)
rt->gcEmptyArenaPoolLifespan = 30000;
/*
* By default the trigger factor gets maximum possible value. This
* means that GC will not be triggered by growth of GC memory (gcBytes).
*/
rt->setGCTriggerFactor((uint32) -1);
rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);
/*
* The assigned value prevents GC from running when GC memory is too low
* (during JS engine start).
*/
rt->setGCLastBytes(8192);
rt->gcNewArenaTriggerBytes = GC_ARENA_ALLOCATION_TRIGGER;
METER(PodZero(&rt->gcStats));
return true;
@ -705,7 +684,7 @@ MarkWordConservatively(JSTracer *trc, jsuword w)
uint32 traceKind;
#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
ConservativeGCTest test =
ConservativeGCTest test =
#endif
MarkIfGCThingWord(trc, w, traceKind);
@ -1016,10 +995,13 @@ void
JSRuntime::setGCLastBytes(size_t lastBytes)
{
gcLastBytes = lastBytes;
uint64 triggerBytes = uint64(lastBytes) * uint64(gcTriggerFactor / 100);
if (triggerBytes != size_t(triggerBytes))
triggerBytes = size_t(-1);
gcTriggerBytes = size_t(triggerBytes);
/* FIXME bug 603916 - we should unify the triggers here. */
float trigger1 = float(lastBytes) * float(gcTriggerFactor) / 100.0f;
float trigger2 = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) *
GC_HEAP_GROWTH_FACTOR;
float maxtriger = Max(trigger1, trigger2);
gcTriggerBytes = (float(gcMaxBytes) < maxtriger) ? gcMaxBytes : size_t(maxtriger);
}
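Restated as a standalone function, the new trigger computation above looks like this. The named constants are illustrative stand-ins for GC_ARENA_ALLOCATION_TRIGGER, GC_HEAP_GROWTH_FACTOR and the gcTriggerFactor/gcMaxBytes runtime fields:

#include <algorithm>
#include <cstddef>
#include <cstdio>

// Stand-in values for illustration only.
static const size_t   ARENA_ALLOCATION_TRIGGER = 30 * 1024 * 1024;
static const float    HEAP_GROWTH_FACTOR       = 3.0f;
static const unsigned TRIGGER_FACTOR           = unsigned(100.0f * HEAP_GROWTH_FACTOR);

size_t computeTriggerBytes(size_t lastBytes, size_t maxBytes) {
    // Trigger 1: a multiple of the heap size that survived the previous GC.
    float trigger1 = float(lastBytes) * float(TRIGGER_FACTOR) / 100.0f;
    // Trigger 2: the same growth factor, but never below the arena floor, so a
    // tiny heap at startup does not schedule a GC on every allocation.
    float trigger2 = float(std::max(lastBytes, ARENA_ALLOCATION_TRIGGER)) * HEAP_GROWTH_FACTOR;
    float maxTrigger = std::max(trigger1, trigger2);
    // Never set the trigger past the hard limit.
    return (float(maxBytes) < maxTrigger) ? maxBytes : size_t(maxTrigger);
}

int main() {
    size_t maxBytes = size_t(-1) >> 1;
    printf("%zu\n", computeTriggerBytes(8192, maxBytes));              // small heap: the floor dominates
    printf("%zu\n", computeTriggerBytes(200u * 1024 * 1024, maxBytes)); // large heap: ~3x lastBytes
    return 0;
}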
void
@ -1033,22 +1015,6 @@ FreeLists::purge()
*p = NULL;
}
static inline bool
IsGCThresholdReached(JSRuntime *rt)
{
#ifdef JS_GC_ZEAL
if (rt->gcZeal >= 1)
return true;
#endif
/*
* Since the initial value of the gcLastBytes parameter is not equal to
* zero (see the js_InitGC function) the return value is false when
* the gcBytes value is close to zero at the JS engine start.
*/
return rt->isGCMallocLimitReached() || rt->gcBytes >= rt->gcTriggerBytes;
}
struct JSShortString;
ArenaList *
@ -1069,6 +1035,38 @@ CheckAllocation(JSContext *cx)
}
#endif
inline bool
NeedLastDitchGC(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
#ifdef JS_GC_ZEAL
if (rt->gcZeal >= 1)
return true;
#endif
return !!rt->gcIsNeeded;
}
/*
* Return false only if the GC ran but could not bring its memory usage under
* JSRuntime::gcMaxBytes.
*/
static bool
RunLastDitchGC(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
METER(rt->gcStats.lastditch++);
#ifdef JS_THREADSAFE
Conditionally<AutoUnlockDefaultCompartment>
unlockDefaultCompartmenIf(cx->compartment == rt->defaultCompartment &&
rt->defaultCompartmentIsLocked, cx);
#endif
/* The last ditch GC preserves all atoms. */
AutoKeepAtoms keep(rt);
js_GC(cx, GC_NORMAL);
return rt->gcBytes < rt->gcMaxBytes;
}
template <typename T>
inline bool
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
@ -1076,31 +1074,17 @@ RefillTypedFreeList(JSContext *cx, unsigned thingKind)
JSCompartment *compartment = cx->compartment;
JS_ASSERT_IF(compartment->freeLists.finalizables[thingKind],
!*compartment->freeLists.finalizables[thingKind]);
JSRuntime *rt = cx->runtime;
ArenaList *arenaList;
Arena<T> *a;
JS_ASSERT(!rt->gcRunning);
if (rt->gcRunning)
JS_ASSERT(!cx->runtime->gcRunning);
if (cx->runtime->gcRunning)
return false;
bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
bool doGC = canGC && IsGCThresholdReached(rt);
arenaList = GetFinalizableArenaList(cx->compartment, thingKind);
do {
if (doGC) {
JS_ASSERT(!JS_ON_TRACE(cx));
#ifdef JS_THREADSAFE
Conditionally<AutoUnlockDefaultCompartment> unlockDefaultCompartmentIf(cx->compartment == cx->runtime->defaultCompartment &&
cx->runtime->defaultCompartmentIsLocked, cx);
#endif
/* The last ditch GC preserves all atoms. */
AutoKeepAtoms keep(cx->runtime);
js_GC(cx, GC_NORMAL);
METER(cx->runtime->gcStats.retry++);
canGC = false;
if (canGC && JS_UNLIKELY(NeedLastDitchGC(cx))) {
if (!RunLastDitchGC(cx))
break;
/*
* The JSGC_END callback can legitimately allocate new GC
* things and populate the free list. If that happens, just
@ -1108,13 +1092,22 @@ RefillTypedFreeList(JSContext *cx, unsigned thingKind)
*/
if (compartment->freeLists.finalizables[thingKind])
return true;
canGC = false;
}
if ((a = (Arena<T> *) arenaList->getNextWithFreeList())) {
ArenaList *arenaList = GetFinalizableArenaList(compartment, thingKind);
Arena<T> *a = reinterpret_cast<Arena<T> *>(arenaList->getNextWithFreeList());
if (a) {
JS_ASSERT(a->header()->freeList);
JS_ASSERT(sizeof(T) == a->header()->thingSize);
compartment->freeLists.populate(a, thingKind);
return true;
}
/*
* If the allocation fails rt->gcIsNeeded will be set and we will run
* the GC on the next loop iteration if the last ditch GC is allowed.
*/
a = AllocateArena<T>(cx, thingKind);
if (a) {
compartment->freeLists.populate(a, thingKind);
@ -1122,13 +1115,11 @@ RefillTypedFreeList(JSContext *cx, unsigned thingKind)
a->getMarkingDelay()->init();
return true;
}
if (!canGC) {
METER(cx->runtime->gcStats.fail++);
js_ReportOutOfMemory(cx);
return false;
}
doGC = true;
} while (true);
} while (canGC);
METER(cx->runtime->gcStats.fail++);
js_ReportOutOfMemory(cx);
return false;
}
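The retry policy of the rewritten RefillTypedFreeList above reduces to a small control-flow sketch. Engine types are replaced with plain bools and pointers; this compiles and runs on its own, but it is only an illustration of the policy, not code usable against SpiderMonkey:

struct Cx {
    bool gcRunning;
    bool gcIsNeeded;      // set by TriggerGC() when allocateArena crosses gcTriggerBytes
    bool onTrace;         // JS_ON_TRACE(cx)
    bool waiveGCQuota;
};

// Stand-ins: the "allocation" always fails and requests a GC.
bool  runLastDitchGC(Cx& cx)   { cx.gcIsNeeded = false; return true; }
void* arenaWithFreeList(Cx&)   { return nullptr; }
void* allocateArena(Cx& cx)    { cx.gcIsNeeded = true; return nullptr; }

bool refillFreeList(Cx& cx) {
    if (cx.gcRunning)
        return false;

    // A last-ditch GC is only allowed off-trace and when quotas are enforced.
    bool canGC = !cx.onTrace && !cx.waiveGCQuota;
    do {
        if (canGC && cx.gcIsNeeded) {
            if (!runLastDitchGC(cx))
                break;                 // GC could not get under gcMaxBytes
            canGC = false;             // at most one last-ditch GC per refill
        }
        if (arenaWithFreeList(cx))     // 1) reuse an arena with free cells
            return true;
        if (allocateArena(cx))         // 2) allocate a new arena; on failure
            return true;               //    gcIsNeeded is set for the retry
    } while (canGC);

    // Both attempts failed and no (further) GC is allowed: report OOM.
    return false;
}

int main() {
    Cx cx = { false, false, false, false };
    return refillFreeList(cx) ? 0 : 1;
}

The real function additionally re-checks the compartment free list right after the GC, because a JSGC_END callback may legitimately allocate and repopulate it.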
bool
@ -1253,8 +1244,8 @@ namespace js {
*
* To implement such delayed marking of the children with minimal overhead for
* the normal case of sufficient native stack, the code adds a field per
* arena. The field markingdelay->link links all arenas with delayed things
* into a stack list with the pointer to stack top in
* arena. The field markingdelay->link links all arenas with delayed things
* into a stack list with the pointer to stack top in
* GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
* arenas to the stack as necessary while markDelayedChildren pops the arenas
* from the stack until it empties.
@ -2267,10 +2258,6 @@ MarkAndSweep(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
FinalizeArenaList<JSString>(*comp, cx, i);
}
rt->gcNewArenaTriggerBytes = rt->gcBytes < GC_ARENA_ALLOCATION_TRIGGER ?
GC_ARENA_ALLOCATION_TRIGGER :
rt->gcBytes;
TIMESTAMP(sweepStringEnd);
SweepCompartments(cx, gckind);
@ -2558,10 +2545,6 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
rt->setGCLastBytes(rt->gcBytes);
}
/*
* The gckind flag bit GC_LOCK_HELD indicates a call from js_NewGCThing with
* rt->gcLock already held, so the lock should be kept on return.
*/
void
js_GC(JSContext *cx, JSGCInvocationKind gckind)
{
@ -2593,24 +2576,20 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
* on another thread.
*/
if (JSGCCallback callback = rt->gcCallback) {
Conditionally<AutoUnlockGC> unlockIf(!!(gckind & GC_LOCK_HELD), rt);
if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
return;
}
{
/* Lock out other GC allocator and collector invocations. */
Conditionally<AutoLockGC> lockIf(!(gckind & GC_LOCK_HELD), rt);
AutoLockGC lock(rt);
GCUntilDone(cx, gckind GCTIMER_ARG);
}
/* We re-sample the callback again as the finalizers can change it. */
if (JSGCCallback callback = rt->gcCallback) {
Conditionally<AutoUnlockGC> unlockIf(gckind & GC_LOCK_HELD, rt);
if (JSGCCallback callback = rt->gcCallback)
(void) callback(cx, JSGC_END);
}
/*
* On shutdown, iterate until the JSGC_END callback stops creating
@ -2710,7 +2689,7 @@ TraceRuntime(JSTracer *trc)
JSContext *cx = trc->context;
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
if (rt->gcThread != cx->thread) {
AutoGCSession gcsession(cx);
AutoUnlockGC unlock(rt);

View file

@ -481,10 +481,11 @@ const size_t GC_ARENA_ALLOCATION_TRIGGER = 30 * js::GC_CHUNK_SIZE;
/*
* A GC is triggered once the number of newly allocated arenas
* is 1.5 times the number of live arenas after the last GC.
* (Starting after the lower limit of GC_ARENA_ALLOCATION_TRIGGER)
* is GC_HEAP_GROWTH_FACTOR times the number of live arenas after
* the last GC starting after the lower limit of
* GC_ARENA_ALLOCATION_TRIGGER.
*/
const float GC_HEAP_GROWTH_FACTOR = 3;
const float GC_HEAP_GROWTH_FACTOR = 3.0f;
static inline size_t
GetFinalizableTraceKind(size_t thingKind)
@ -827,12 +828,7 @@ typedef enum JSGCInvocationKind {
* Called from js_DestroyContext for last JSContext in a JSRuntime, when
* it is imperative that rt->gcPoke gets cleared early in js_GC.
*/
GC_LAST_CONTEXT = 1,
/*
* Flag bit telling js_GC that the caller has already acquired rt->gcLock.
*/
GC_LOCK_HELD = 0x10
GC_LAST_CONTEXT = 1
} JSGCInvocationKind;
extern void

View file

@ -279,7 +279,7 @@ js_DumpGCStats(JSRuntime *rt, FILE *fp)
DumpArenaStats(&rt->globalArenaStats[0], fp);
fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes));
fprintf(fp, " allocation failures: %lu\n", ULSTAT(fail));
fprintf(fp, "allocation retries after GC: %lu\n", ULSTAT(retry));
fprintf(fp, " last ditch GC runs: %lu\n", ULSTAT(lastditch));
fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock));
fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock));
fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked));

View file

@ -107,7 +107,7 @@ struct JSGCStats {
uint32 unlock; /* valid unlock calls */
uint32 unmarked; /* number of times marking of GC thing's children were
delayed due to a low C stack */
uint32 retry; /* allocation retries after running the GC */
uint32 lastditch; /* number of times the last ditch GC run */
uint32 fail; /* allocation failures */
#ifdef DEBUG
uint32 maxunmarked;/* maximum number of things with children to mark

View file

@ -1657,6 +1657,8 @@ js_DumpOpMeters()
# define SIGNIFICANT(count,total) (200. * (count) >= (total))
graph = (Edge *) js_calloc(nedges * sizeof graph[0]);
if (!graph)
return;
for (i = nedges = 0; i < JSOP_LIMIT; i++) {
from = js_CodeName[i];
for (j = 0; j < JSOP_LIMIT; j++) {
@ -4287,8 +4289,6 @@ BEGIN_CASE(JSOP_SETMETHOD)
JSObject *obj2;
JSAtom *atom;
if (cache->testForSet(cx, regs.pc, obj, &entry, &obj2, &atom)) {
JS_ASSERT(obj->isExtensible());
/*
* Fast property cache hit, only partially confirmed by
* testForSet. We know that the entry applies to regs.pc and
@ -4328,6 +4328,8 @@ BEGIN_CASE(JSOP_SETMETHOD)
break;
}
} else {
JS_ASSERT(obj->isExtensible());
if (obj->nativeEmpty()) {
/*
* We check that cx owns obj here and will continue to own

View file

@ -1665,10 +1665,6 @@ js_CheckPrincipalsAccess(JSContext *cx, JSObject *scopeobj,
extern JSBool
js_CheckContentSecurityPolicy(JSContext *cx);
/* Infallible -- returns its argument if there is no wrapped object. */
extern JSObject *
js_GetWrappedObject(JSContext *cx, JSObject *obj);
/* NB: Infallible. */
extern const char *
js_ComputeFilename(JSContext *cx, JSStackFrame *caller,

View file

@ -828,7 +828,7 @@ Compiler::compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *calle
if (source) {
/*
* Save eval program source in script->atomMap.vector[0] for the
* eval cache (see obj_eval in jsobj.cpp).
* eval cache (see EvalCacheLookup in jsobj.cpp).
*/
JSAtom *atom = js_AtomizeString(cx, source, 0);
if (!atom || !cg.atomList.add(&parser, atom))

View file

@ -208,8 +208,6 @@
#define SHAPE_INVALID_SLOT 0xffffffff
JS_STATIC_ASSERT(uint32(SHAPE_INVALID_SLOT + 1) == uint32(0));
namespace js {
/*
@ -376,9 +374,9 @@ struct Shape : public JSObjectMap
*
* Any child shape, whether in a shape tree or in a dictionary list, must
* have a slotSpan either one greater than its slot value (if the child's
* slot is SHAPE_INVALID_SLOT, this will yield 0; the static assertion just
* after the SHAPE_INVALID_SLOT definition enforces this), or equal to its
* parent p's slotSpan, whichever is greater. This is the inductive step.
* slot is SHAPE_INVALID_SLOT, this will yield 0; the static assertion
* below enforces this), or equal to its parent p's slotSpan, whichever is
* greater. This is the inductive step.
*
* If we maintained shape paths such that parent slot was always one less
* than child slot, possibly with an exception for SHAPE_INVALID_SLOT slot
@ -408,6 +406,7 @@ struct Shape : public JSObjectMap
* with an auxiliary mechanism based on table.
*/
void setParent(js::Shape *p) {
JS_STATIC_ASSERT(uint32(SHAPE_INVALID_SLOT) == ~uint32(0));
if (p)
slotSpan = JS_MAX(p->slotSpan, slot + 1);
JS_ASSERT(slotSpan < JSObject::NSLOTS_LIMIT);

View file

@ -974,15 +974,22 @@ JSScript::NewScript(JSContext *cx, uint32 length, uint32 nsrcnotes, uint32 natom
cursor += sizeof(JSTryNoteArray);
}
if (nglobals != 0) {
JS_ASSERT((cursor - (uint8*)script) <= 0xFF);
script->globalsOffset = (uint8)(cursor - (uint8 *)script);
cursor += sizeof(GlobalSlotArray);
}
JS_ASSERT((cursor - (uint8 *)script) <= 0xFF);
if (nconsts != 0) {
script->constOffset = (uint8)(cursor - (uint8 *)script);
cursor += sizeof(JSConstArray);
}
JS_STATIC_ASSERT(sizeof(JSScript) +
sizeof(JSObjectArray) +
sizeof(JSUpvarArray) +
sizeof(JSObjectArray) +
sizeof(JSTryNoteArray) +
sizeof(GlobalSlotArray) <= 0xFF);
if (natoms != 0) {
script->atomMap.length = natoms;
script->atomMap.vector = (JSAtom **)cursor;

View file

@ -404,20 +404,6 @@ js_ConcatStrings(JSContext *cx, JSString *left, JSString *right)
return FinishConcat(cx, leftRopeTop, rightRopeTop, left, right, length, buf);
}
JSString * JS_FASTCALL
js_ConcatStringsZ(JSContext *cx, const char *left, JSString *right)
{
const size_t leftLength = strlen(left);
const size_t newLength = leftLength + right->length();
const size_t newSize = (newLength + 1) * sizeof(jschar);
jschar *chars = static_cast<jschar *>(cx->malloc(newSize));
for (size_t i = 0; i < leftLength; ++i)
chars[i] = left[i];
js_strncpy(chars + leftLength, right->chars(), right->length());
JSString *str = js_NewString(cx, chars, newLength);
return str;
}
const jschar *
JSString::undepend(JSContext *cx)
{

View file

@ -76,9 +76,6 @@ js_GetDependentStringChars(JSString *str);
extern JSString * JS_FASTCALL
js_ConcatStrings(JSContext *cx, JSString *left, JSString *right);
extern JSString * JS_FASTCALL
js_ConcatStringsZ(JSContext *cx, const char *left, JSString *right);
JS_STATIC_ASSERT(JS_BITS_PER_WORD >= 32);
struct JSRopeBufferInfo {

View file

@ -4594,10 +4594,12 @@ TraceRecorder::compile()
/* Associate a filename and line number with the fragment. */
const char* filename = cx->fp()->script()->filename;
char* label = (char*)js_malloc((filename ? strlen(filename) : 7) + 16);
sprintf(label, "%s:%u", filename ? filename : "<stdin>",
js_FramePCToLineNumber(cx, cx->fp()));
lirbuf->printer->addrNameMap->addAddrRange(fragment, sizeof(Fragment), 0, label);
js_free(label);
if (label) {
sprintf(label, "%s:%u", filename ? filename : "<stdin>",
js_FramePCToLineNumber(cx, cx->fp()));
lirbuf->printer->addrNameMap->addAddrRange(fragment, sizeof(Fragment), 0, label);
js_free(label);
}
#endif
Assembler *assm = traceMonitor->assembler;
@ -8751,9 +8753,9 @@ TraceRecorder::tableswitch()
high = GET_JUMP_OFFSET(pc);
} else {
pc += JUMPX_OFFSET_LEN;
low = GET_JUMPX_OFFSET(pc);
pc += JUMPX_OFFSET_LEN;
high = GET_JUMPX_OFFSET(pc);
low = GET_JUMP_OFFSET(pc);
pc += JUMP_OFFSET_LEN;
high = GET_JUMP_OFFSET(pc);
}
/*
@ -8762,6 +8764,7 @@ TraceRecorder::tableswitch()
* action to handle it.
*/
int count = high + 1 - low;
JS_ASSERT(count >= 0);
if (count == 0)
return ARECORD_CONTINUE;
@ -10269,8 +10272,9 @@ TraceRecorder::clearCurrentFrameSlotsFromTracker(Tracker& which)
which.set(vp, (LIns*)0);
}
struct BoxArg
class BoxArg
{
public:
BoxArg(TraceRecorder *tr, ptrdiff_t offset, LIns *base_ins)
: tr(tr), offset(offset), base_ins(base_ins) {}
TraceRecorder *tr;

View file

@ -927,7 +927,8 @@ class TypedArrayTemplate
return false;
}
tarray->copyFrom(src, offset);
if (!tarray->copyFrom(cx, src, offset))
return false;
} else if (arg0->wrappedObject(cx)->isArray()) {
jsuint len;
if (!js_GetLengthProperty(cx, arg0, &len))
@ -1007,7 +1008,8 @@ class TypedArrayTemplate
if (!createBufferWithSizeAndCount(cx, sizeof(NativeType), tarray->length))
return false;
copyFrom(tarray);
if (!copyFrom(cx, tarray))
return false;
} else if (other->getClass() == &ArrayBuffer::jsclass) {
ArrayBuffer *abuf = ArrayBuffer::fromJSObject(other);
@ -1154,21 +1156,19 @@ class TypedArrayTemplate
return true;
}
void
copyFrom(TypedArray *tarray, jsuint offset = 0)
bool
copyFrom(JSContext *cx, TypedArray *tarray, jsuint offset = 0)
{
JS_ASSERT(offset <= length);
JS_ASSERT(tarray->length <= length - offset);
if (tarray->buffer == buffer) {
copyFromWithOverlap(tarray, offset);
return;
}
if (tarray->buffer == buffer)
return copyFromWithOverlap(cx, tarray, offset);
NativeType *dest = static_cast<NativeType*>(data) + offset;
if (tarray->type == type) {
memcpy(dest, tarray->data, tarray->byteLength);
return;
return true;
}
uintN srclen = tarray->length;
@ -1226,10 +1226,12 @@ class TypedArrayTemplate
JS_NOT_REACHED("copyFrom with a TypedArray of unknown type");
break;
}
return true;
}
void
copyFromWithOverlap(TypedArray *tarray, jsuint offset = 0)
bool
copyFromWithOverlap(JSContext *cx, TypedArray *tarray, jsuint offset = 0)
{
JS_ASSERT(offset < length);
@ -1237,12 +1239,16 @@ class TypedArrayTemplate
if (tarray->type == type) {
memmove(dest, tarray->data, tarray->byteLength);
return;
return true;
}
// We have to make a copy of the source array here, since
// there's overlap, and we have to convert types.
void *srcbuf = js_malloc(tarray->byteLength);
if (!srcbuf) {
js_ReportOutOfMemory(cx);
return false;
}
memcpy(srcbuf, tarray->data, tarray->byteLength);
switch (tarray->type) {
@ -1301,6 +1307,7 @@ class TypedArrayTemplate
}
js_free(srcbuf);
return true;
}
bool
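The new copyFromWithOverlap signature exists because the overlapping, type-converting copy must snapshot its source (the js_malloc'd srcbuf) before writing anything back. A small standalone illustration of the hazard, using plain byte arrays and memcpy in place of the shared ArrayBuffer:

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
    // Bytes 0..3 are the int8 source elements {1,2,3,4}; the int16 destination
    // also starts at byte 0 (think Int16Array.set(int8view) over one buffer).
    unsigned char buffer[8] = { 1, 2, 3, 4, 0, 0, 0, 0 };

    // The fix in the hunk above: copy the source out before converting, so
    // later reads cannot observe already-written destination elements.
    int8_t snapshot[4];
    memcpy(snapshot, buffer, sizeof(snapshot));

    for (int i = 0; i < 4; i++) {
        int16_t widened = snapshot[i];                      // int8 -> int16 conversion
        memcpy(buffer + 2 * i, &widened, sizeof(widened));  // store into the shared buffer
    }
    // Converting in place instead would overwrite source byte 1 during the
    // i == 0 store, before that source element is ever read.

    int16_t out[4];
    memcpy(out, buffer, sizeof(out));
    printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  // prints: 1 2 3 4
    return 0;
}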

View file

@ -595,9 +595,9 @@ JSVAL_IS_STRING_IMPL(jsval_layout l)
static JS_ALWAYS_INLINE jsval_layout
STRING_TO_JSVAL_IMPL(JSString *str)
{
JS_ASSERT(str);
jsval_layout l;
uint64 strBits = (uint64)str;
JS_ASSERT(str);
JS_ASSERT((strBits >> JSVAL_TAG_SHIFT) == 0);
l.asBits = strBits | JSVAL_SHIFTED_TAG_STRING;
return l;

View file

@ -46,7 +46,7 @@
#elif defined JS_PUNBOX64
# include "PunboxAssembler.h"
#else
# error "Neither JS_NUNBOX32 nor JS_PUNBOX32 is defined."
# error "Neither JS_NUNBOX32 nor JS_PUNBOX64 is defined."
#endif
/* Get a label for assertion purposes. Prevent #ifdef clutter. */

View file

@ -67,6 +67,14 @@ using namespace js::mjit::ic;
#define ADD_CALLSITE(stub) if (debugMode) addCallSite(__LINE__, (stub))
#define RETURN_IF_OOM(retval) \
JS_BEGIN_MACRO \
if (masm.oom() || stubcc.masm.oom()) { \
js_ReportOutOfMemory(cx); \
return retval; \
} \
JS_END_MACRO
#if defined(JS_METHODJIT_SPEW)
static const char *OpcodeNames[] = {
# define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
@ -319,6 +327,8 @@ mjit::Compiler::generateEpilogue()
CompileStatus
mjit::Compiler::finishThisUp(JITScript **jitp)
{
RETURN_IF_OOM(Compile_Error);
for (size_t i = 0; i < branchPatches.length(); i++) {
Label label = labelOf(branchPatches[i].pc);
branchPatches[i].jump.linkTo(label, &masm);
@ -342,7 +352,7 @@ mjit::Compiler::finishThisUp(JITScript **jitp)
JSC::ExecutableAllocator::makeWritable(result, totalSize);
masm.executableCopy(result);
stubcc.masm.executableCopy(result + masm.size());
JSC::LinkBuffer fullCode(result, totalSize);
JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size());
@ -955,7 +965,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_VOID)
BEGIN_CASE(JSOP_INCNAME)
jsop_nameinc(op, STRICT_VARIANT(stubs::IncName), fullAtomIndex(PC));
if (!jsop_nameinc(op, STRICT_VARIANT(stubs::IncName), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_INCNAME)
@ -965,7 +976,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_INCGNAME)
BEGIN_CASE(JSOP_INCPROP)
jsop_propinc(op, STRICT_VARIANT(stubs::IncProp), fullAtomIndex(PC));
if (!jsop_propinc(op, STRICT_VARIANT(stubs::IncProp), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_INCPROP)
@ -974,7 +986,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_INCELEM)
BEGIN_CASE(JSOP_DECNAME)
jsop_nameinc(op, STRICT_VARIANT(stubs::DecName), fullAtomIndex(PC));
if (!jsop_nameinc(op, STRICT_VARIANT(stubs::DecName), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_DECNAME)
@ -984,7 +997,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_DECGNAME)
BEGIN_CASE(JSOP_DECPROP)
jsop_propinc(op, STRICT_VARIANT(stubs::DecProp), fullAtomIndex(PC));
if (!jsop_propinc(op, STRICT_VARIANT(stubs::DecProp), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_DECPROP)
@ -993,7 +1007,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_DECELEM)
BEGIN_CASE(JSOP_NAMEINC)
jsop_nameinc(op, STRICT_VARIANT(stubs::NameInc), fullAtomIndex(PC));
if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameInc), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_NAMEINC)
@ -1003,7 +1018,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_GNAMEINC)
BEGIN_CASE(JSOP_PROPINC)
jsop_propinc(op, STRICT_VARIANT(stubs::PropInc), fullAtomIndex(PC));
if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropInc), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_PROPINC)
@ -1012,7 +1028,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_ELEMINC)
BEGIN_CASE(JSOP_NAMEDEC)
jsop_nameinc(op, STRICT_VARIANT(stubs::NameDec), fullAtomIndex(PC));
if (!jsop_nameinc(op, STRICT_VARIANT(stubs::NameDec), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_NAMEDEC)
@ -1022,7 +1039,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_GNAMEDEC)
BEGIN_CASE(JSOP_PROPDEC)
jsop_propinc(op, STRICT_VARIANT(stubs::PropDec), fullAtomIndex(PC));
if (!jsop_propinc(op, STRICT_VARIANT(stubs::PropDec), fullAtomIndex(PC)))
return Compile_Error;
break;
END_CASE(JSOP_PROPDEC)
@ -1033,30 +1051,36 @@ mjit::Compiler::generateMethod()
BEGIN_CASE(JSOP_GETTHISPROP)
/* Push thisv onto stack. */
jsop_this();
jsop_getprop(script->getAtom(fullAtomIndex(PC)));
if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
return Compile_Error;
END_CASE(JSOP_GETTHISPROP);
BEGIN_CASE(JSOP_GETARGPROP)
/* Push arg onto stack. */
jsop_getarg(GET_SLOTNO(PC));
jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN])));
if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN]))))
return Compile_Error;
END_CASE(JSOP_GETARGPROP)
BEGIN_CASE(JSOP_GETLOCALPROP)
frame.pushLocal(GET_SLOTNO(PC));
jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN])));
if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN]))))
return Compile_Error;
END_CASE(JSOP_GETLOCALPROP)
BEGIN_CASE(JSOP_GETPROP)
jsop_getprop(script->getAtom(fullAtomIndex(PC)));
if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
return Compile_Error;
END_CASE(JSOP_GETPROP)
BEGIN_CASE(JSOP_LENGTH)
jsop_length();
if (!jsop_length())
return Compile_Error;
END_CASE(JSOP_LENGTH)
BEGIN_CASE(JSOP_GETELEM)
jsop_getelem();
if (!jsop_getelem())
return Compile_Error;
END_CASE(JSOP_GETELEM)
BEGIN_CASE(JSOP_SETELEM)
@ -1347,12 +1371,14 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_BINDNAME)
BEGIN_CASE(JSOP_SETPROP)
jsop_setprop(script->getAtom(fullAtomIndex(PC)));
if (!jsop_setprop(script->getAtom(fullAtomIndex(PC))))
return Compile_Error;
END_CASE(JSOP_SETPROP)
BEGIN_CASE(JSOP_SETNAME)
BEGIN_CASE(JSOP_SETMETHOD)
jsop_setprop(script->getAtom(fullAtomIndex(PC)));
if (!jsop_setprop(script->getAtom(fullAtomIndex(PC))))
return Compile_Error;
END_CASE(JSOP_SETNAME)
BEGIN_CASE(JSOP_THROW)
@ -1370,7 +1396,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_IN)
BEGIN_CASE(JSOP_INSTANCEOF)
jsop_instanceof();
if (!jsop_instanceof())
return Compile_Error;
END_CASE(JSOP_INSTANCEOF)
BEGIN_CASE(JSOP_EXCEPTION)
@ -1598,7 +1625,8 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_STOP)
BEGIN_CASE(JSOP_GETXPROP)
jsop_xname(script->getAtom(fullAtomIndex(PC)));
if (!jsop_xname(script->getAtom(fullAtomIndex(PC))))
return Compile_Error;
END_CASE(JSOP_GETXPROP)
BEGIN_CASE(JSOP_ENTERBLOCK)
@ -1703,8 +1731,10 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_GLOBALINC)
BEGIN_CASE(JSOP_BEGIN)
if (isConstructing)
constructThis();
if (isConstructing) {
if (!constructThis())
return Compile_Error;
}
END_CASE(JSOP_BEGIN)
default:
@ -2392,7 +2422,7 @@ mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
return true;
}
void
bool
mjit::Compiler::jsop_length()
{
FrameEntry *top = frame.peek(-1);
@ -2411,16 +2441,17 @@ mjit::Compiler::jsop_length()
frame.pop();
frame.pushTypedPayload(JSVAL_TYPE_INT32, str);
}
return;
return true;
}
#if defined JS_POLYIC
jsop_getprop(cx->runtime->atomState.lengthAtom);
return jsop_getprop(cx->runtime->atomState.lengthAtom);
#else
prepareStubCall(Uses(1));
stubCall(stubs::Length);
frame.pop();
frame.pushSynced();
return true;
#endif
}
@ -2439,7 +2470,7 @@ mjit::Compiler::passPICAddress(PICGenInfo &pic)
pic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
}
void
bool
mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
{
FrameEntry *top = frame.peek(-1);
@ -2449,7 +2480,7 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
JS_ASSERT_IF(atom == cx->runtime->atomState.lengthAtom,
top->getKnownType() != JSVAL_TYPE_STRING);
jsop_getprop_slow();
return;
return true;
}
/*
@ -2477,6 +2508,7 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
Jump j = masm.testObject(Assembler::NotEqual, reg);
/* GETPROP_INLINE_TYPE_GUARD is used to patch the jmp, not cmp. */
RETURN_IF_OOM(false);
JS_ASSERT(masm.differenceBetween(pic.fastPathStart, masm.label()) == GETPROP_INLINE_TYPE_GUARD);
pic.typeCheck = stubcc.linkExit(j, Uses(1));
@ -2536,8 +2568,8 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
#endif
pic.storeBack = masm.label();
/* Assert correctness of hardcoded offsets. */
RETURN_IF_OOM(false);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
@ -2564,10 +2596,11 @@ mjit::Compiler::jsop_getprop(JSAtom *atom, bool doTypeCheck)
stubcc.rejoin(Changes(1));
pics.append(pic);
return true;
}
#ifdef JS_POLYIC
void
bool
mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID objReg,
RegisterID idReg, RegisterID shapeReg)
{
@ -2637,6 +2670,7 @@ mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID obj
pic.objReg = objReg;
pic.idReg = idReg;
RETURN_IF_OOM(false);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETELEM_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETELEM_TYPE_LOAD);
@ -2668,6 +2702,7 @@ mjit::Compiler::jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID obj
JS_ASSERT(pic.objReg != pic.shapeReg);
pics.append(pic);
return true;
}
#endif
@ -2768,6 +2803,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
pic.storeBack = masm.label();
/* Assert correctness of hardcoded offsets. */
RETURN_IF_OOM(false);
JS_ASSERT(masm.differenceBetween(pic.fastPathStart, dbgInlineTypeGuard) == GETPROP_INLINE_TYPE_GUARD);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
@ -2814,7 +2850,8 @@ mjit::Compiler::jsop_callprop_str(JSAtom *atom)
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
/* Get the property. */
jsop_getprop(atom);
if (!jsop_getprop(atom))
return false;
/* Perform a swap. */
frame.dup2();
@ -2926,6 +2963,7 @@ mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
* Assert correctness of hardcoded offsets.
* No type guard: type is asserted.
*/
RETURN_IF_OOM(false);
#if defined JS_NUNBOX32
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgDslotsLoad) == GETPROP_DSLOTS_LOAD);
JS_ASSERT(masm.differenceBetween(pic.storeBack, dbgTypeLoad) == GETPROP_TYPE_LOAD);
@ -2968,7 +3006,7 @@ mjit::Compiler::jsop_callprop(JSAtom *atom)
return jsop_callprop_generic(atom);
}
void
bool
mjit::Compiler::jsop_setprop(JSAtom *atom)
{
FrameEntry *lhs = frame.peek(-2);
@ -2977,7 +3015,7 @@ mjit::Compiler::jsop_setprop(JSAtom *atom)
/* If the incoming type will never PIC, take slow path. */
if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
jsop_setprop_slow(atom);
return;
return true;
}
JSOp op = JSOp(*PC);
@ -3083,6 +3121,7 @@ mjit::Compiler::jsop_setprop(JSAtom *atom)
stubcc.rejoin(Changes(1));
}
RETURN_IF_OOM(false);
#if defined JS_PUNBOX64
pic.labels.setprop.dslotsLoadOffset = masm.differenceBetween(pic.storeBack, dslotsLoadLabel);
pic.labels.setprop.inlineShapeOffset = masm.differenceBetween(pic.shapeGuard, inlineShapeOffsetLabel);
@ -3108,6 +3147,7 @@ mjit::Compiler::jsop_setprop(JSAtom *atom)
#endif
pics.append(pic);
return true;
}
void
@ -3142,15 +3182,14 @@ mjit::Compiler::jsop_name(JSAtom *atom)
pics.append(pic);
}
void
bool
mjit::Compiler::jsop_xname(JSAtom *atom)
{
PICGenInfo pic(ic::PICInfo::XNAME);
FrameEntry *fe = frame.peek(-1);
if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
jsop_getprop(atom);
return;
return jsop_getprop(atom);
}
if (!fe->isTypeKnown()) {
@ -3184,6 +3223,7 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
stubcc.rejoin(Changes(1));
pics.append(pic);
return true;
}
void
@ -3243,16 +3283,17 @@ mjit::Compiler::jsop_name(JSAtom *atom)
frame.pushSynced();
}
void
bool
mjit::Compiler::jsop_xname(JSAtom *atom)
{
jsop_getprop(atom);
return jsop_getprop(atom);
}
void
bool
mjit::Compiler::jsop_getprop(JSAtom *atom, bool typecheck)
{
jsop_getprop_slow();
return true;
}
bool
@ -3261,10 +3302,11 @@ mjit::Compiler::jsop_callprop(JSAtom *atom)
return jsop_callprop_slow(atom);
}
void
bool
mjit::Compiler::jsop_setprop(JSAtom *atom)
{
jsop_setprop_slow(atom);
return true;
}
void
@ -3401,7 +3443,7 @@ mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
PC += JSOP_GNAMEINC_LENGTH;
}
void
bool
mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
{
JSAtom *atom = script->getAtom(index);
@ -3435,7 +3477,8 @@ mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
frame.shift(-1);
// OBJ V+1
jsop_setprop(atom);
if (!jsop_setprop(atom))
return false;
// V+1
if (pop)
@ -3470,7 +3513,8 @@ mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
frame.shift(-1);
// N OBJ N+1
jsop_setprop(atom);
if (!jsop_setprop(atom))
return false;
// N N+1
frame.pop();
@ -3487,9 +3531,10 @@ mjit::Compiler::jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index)
#endif
PC += JSOP_NAMEINC_LENGTH;
return true;
}
void
bool
mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
{
JSAtom *atom = script->getAtom(index);
@ -3506,7 +3551,8 @@ mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
frame.dup();
// OBJ OBJ
jsop_getprop(atom);
if (!jsop_getprop(atom))
return false;
// OBJ V
frame.push(Int32Value(amt));
@ -3516,7 +3562,8 @@ mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
jsop_binary(JSOP_SUB, stubs::Sub);
// OBJ V+1
jsop_setprop(atom);
if (!jsop_setprop(atom))
return false;
// V+1
if (pop)
@ -3527,7 +3574,8 @@ mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
frame.dup();
// OBJ OBJ
jsop_getprop(atom);
if (!jsop_getprop(atom))
return false;
// OBJ V
jsop_pos();
@ -3548,7 +3596,8 @@ mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
frame.dupAt(-2);
// OBJ N N+1 OBJ N+1
jsop_setprop(atom);
if (!jsop_setprop(atom))
return false;
// OBJ N N+1 N+1
frame.popn(2);
@ -3570,6 +3619,7 @@ mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
}
PC += JSOP_PROPINC_LENGTH;
return true;
}
void
@ -4124,7 +4174,7 @@ mjit::Compiler::jsop_unbrand()
stubCall(stubs::Unbrand);
}
void
bool
mjit::Compiler::jsop_instanceof()
{
FrameEntry *lhs = frame.peek(-2);
@ -4137,7 +4187,7 @@ mjit::Compiler::jsop_instanceof()
frame.popn(2);
frame.takeReg(Registers::ReturnReg);
frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
return;
return true;
}
MaybeJump firstSlow;
@ -4164,7 +4214,8 @@ mjit::Compiler::jsop_instanceof()
/* This is sadly necessary because the error case needs the object. */
frame.dup();
jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false);
if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false))
return false;
/* Primitive prototypes are invalid. */
rhs = frame.peek(-1);
@ -4216,6 +4267,7 @@ mjit::Compiler::jsop_instanceof()
if (firstSlow.isSet())
firstSlow.getJump().linkTo(stubcc.masm.label(), &stubcc.masm);
stubcc.rejoin(Changes(1));
return true;
}
/*
@ -4341,7 +4393,7 @@ mjit::Compiler::leaveBlock()
// NULL
// call js_CreateThisFromFunctionWithProto(...)
//
void
bool
mjit::Compiler::constructThis()
{
JS_ASSERT(isConstructing);
@ -4353,7 +4405,8 @@ mjit::Compiler::constructThis()
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, calleeReg);
// Get callee.prototype.
jsop_getprop(cx->runtime->atomState.classPrototypeAtom);
if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom))
return false;
// Reach into the proto Value and grab a register for its data.
FrameEntry *protoFe = frame.peek(-1);
@ -4373,5 +4426,6 @@ mjit::Compiler::constructThis()
masm.move(protoReg, Registers::ArgReg1);
stubCall(stubs::CreateThis);
frame.freeReg(protoReg);
return true;
}
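The recurring change throughout this file is that opcode helpers such as jsop_getprop and jsop_setprop now return bool, and generateMethod() converts a false return into Compile_Error. A hedged standalone sketch of that propagation pattern (the names below are stand-ins, not the real Compiler interface):
// Illustration only.
enum CompileStatus { Compile_Okay, Compile_Error };
static bool emitOpcode(bool oom)          // stands in for a fallible jsop_* helper
{
    if (oom)
        return false;                     // e.g. RETURN_IF_OOM(false) fired
    /* ... emit inline and out-of-line stub paths ... */
    return true;
}
static CompileStatus generate(bool oom)   // stands in for generateMethod()
{
    if (!emitOpcode(oom))
        return Compile_Error;             // any handler failure aborts compilation
    return Compile_Okay;
}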

View file

@ -271,7 +271,7 @@ class Compiler : public BaseCompiler
#ifdef JS_MONOIC
void passMICAddress(MICGenInfo &mic);
#endif
void constructThis();
bool constructThis();
/* Opcode handlers. */
void jumpAndTrace(Jump j, jsbytecode *target, Jump *slowOne = NULL, Jump *slowTwo = NULL);
@ -291,8 +291,8 @@ class Compiler : public BaseCompiler
void inlineCallHelper(uint32 argc, bool callingNew);
void fixPrimitiveReturn(Assembler *masm, FrameEntry *fe);
void jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index);
void jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index);
void jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index);
bool jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index);
bool jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index);
void jsop_eleminc(JSOp op, VoidStub);
void jsop_getgname(uint32 index);
void jsop_getgname_slow(uint32 index);
@ -302,18 +302,18 @@ class Compiler : public BaseCompiler
void jsop_setelem_slow();
void jsop_getelem_slow();
void jsop_unbrand();
void jsop_getprop(JSAtom *atom, bool typeCheck = true);
void jsop_length();
void jsop_setprop(JSAtom *atom);
bool jsop_getprop(JSAtom *atom, bool typeCheck = true);
bool jsop_length();
bool jsop_setprop(JSAtom *atom);
void jsop_setprop_slow(JSAtom *atom);
bool jsop_callprop_slow(JSAtom *atom);
bool jsop_callprop(JSAtom *atom);
bool jsop_callprop_obj(JSAtom *atom);
bool jsop_callprop_str(JSAtom *atom);
bool jsop_callprop_generic(JSAtom *atom);
void jsop_instanceof();
bool jsop_instanceof();
void jsop_name(JSAtom *atom);
void jsop_xname(JSAtom *atom);
bool jsop_xname(JSAtom *atom);
void enterBlock(JSObject *obj);
void leaveBlock();
@ -365,11 +365,11 @@ class Compiler : public BaseCompiler
void jsop_arginc(JSOp op, uint32 slot, bool popped);
void jsop_localinc(JSOp op, uint32 slot, bool popped);
void jsop_setelem();
void jsop_getelem();
void jsop_getelem_known_type(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg);
void jsop_getelem_with_pic(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg);
bool jsop_getelem();
bool jsop_getelem_known_type(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg);
bool jsop_getelem_with_pic(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg);
void jsop_getelem_nopic(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg);
void jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID objReg, RegisterID idReg,
bool jsop_getelem_pic(FrameEntry *obj, FrameEntry *id, RegisterID objReg, RegisterID idReg,
RegisterID shapeReg);
void jsop_getelem_dense(FrameEntry *obj, FrameEntry *id, RegisterID objReg,
MaybeRegisterID &idReg, RegisterID shapeReg);

View file

@ -1008,8 +1008,8 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, jsbytecode *tar
JaegerSpew(JSpew_Insns, " ---- BEGIN STUB CALL CODE ---- \n");
/* The lhs/rhs need to be synced in the stub call path. */
frame.syncEntry(stubcc.masm, lhs, lvr);
frame.syncEntry(stubcc.masm, rhs, rvr);
frame.ensureValueSynced(stubcc.masm, lhs, lvr);
frame.ensureValueSynced(stubcc.masm, rhs, rvr);
/* Call the stub, adjusting for the two values just pushed. */
stubcc.call(stub, frame.stackDepth() + script->nfixed + 2);

View file

@ -1398,7 +1398,7 @@ mjit::Compiler::jsop_getelem_dense(FrameEntry *obj, FrameEntry *id, RegisterID o
/* Note: linkExits will be hooked up to a leave() after this method completes. */
}
void
bool
mjit::Compiler::jsop_getelem_known_type(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg)
{
switch (id->getKnownType()) {
@ -1421,7 +1421,7 @@ mjit::Compiler::jsop_getelem_known_type(FrameEntry *obj, FrameEntry *id, Registe
frame.popn(2);
frame.pushRegs(tmpReg, objReg);
stubcc.rejoin(Changes(1));
return;
break;
}
#ifdef JS_POLYIC
case JSVAL_TYPE_STRING:
@ -1431,23 +1431,25 @@ mjit::Compiler::jsop_getelem_known_type(FrameEntry *obj, FrameEntry *id, Registe
RegisterID idReg = frame.copyDataIntoReg(id);
/* Meat. */
jsop_getelem_pic(obj, id, objReg, idReg, tmpReg);
if (!jsop_getelem_pic(obj, id, objReg, idReg, tmpReg))
return false;
/* Epilogue. */
frame.popn(2);
frame.pushRegs(tmpReg, objReg);
frame.freeReg(idReg);
stubcc.rejoin(Changes(1));
return;
break;
}
#endif
default:
JS_NOT_REACHED("Invalid known id type.");
}
return true;
}
#ifdef JS_POLYIC
void
bool
mjit::Compiler::jsop_getelem_with_pic(FrameEntry *obj, FrameEntry *id, RegisterID tmpReg)
{
JS_ASSERT(!id->isTypeKnown());
@ -1472,13 +1474,15 @@ mjit::Compiler::jsop_getelem_with_pic(FrameEntry *obj, FrameEntry *id, RegisterI
stubcc.call(stubs::GetElem);
Jump toFinalMerge = stubcc.masm.jump();
jsop_getelem_pic(obj, id, objReg, idReg.reg(), tmpReg);
if (!jsop_getelem_pic(obj, id, objReg, idReg.reg(), tmpReg))
return false;
performedDense.linkTo(masm.label(), &masm);
frame.popn(2);
frame.pushRegs(tmpReg, objReg);
frame.freeReg(idReg.reg());
toFinalMerge.linkTo(stubcc.masm.label(), &stubcc.masm);
stubcc.rejoin(Changes(1));
return true;
}
#endif
@ -1504,7 +1508,7 @@ mjit::Compiler::jsop_getelem_nopic(FrameEntry *obj, FrameEntry *id, RegisterID t
stubcc.rejoin(Changes(1));
}
void
bool
mjit::Compiler::jsop_getelem()
{
FrameEntry *obj = frame.peek(-2);
@ -1512,7 +1516,7 @@ mjit::Compiler::jsop_getelem()
if (obj->isTypeKnown() && obj->getKnownType() != JSVAL_TYPE_OBJECT) {
jsop_getelem_slow();
return;
return true;
}
if (id->isTypeKnown() &&
@ -1522,19 +1526,19 @@ mjit::Compiler::jsop_getelem()
#endif
)) {
jsop_getelem_slow();
return;
return true;
}
if (id->isTypeKnown() && id->getKnownType() == JSVAL_TYPE_INT32 && id->isConstant() &&
id->getValue().toInt32() < 0) {
jsop_getelem_slow();
return;
return true;
}
if (id->isTypeKnown() && id->getKnownType() == JSVAL_TYPE_STRING && id->isConstant()) {
/* Never happens, or I'd optimize it. */
jsop_getelem_slow();
return;
return true;
}
RegisterID tmpReg;
@ -1552,7 +1556,8 @@ mjit::Compiler::jsop_getelem()
#ifdef JS_POLYIC
return jsop_getelem_with_pic(obj, id, tmpReg);
#else
return jsop_getelem_nopic(obj, id, tmpReg);
jsop_getelem_nopic(obj, id, tmpReg);
return true;
#endif
}
@ -1643,13 +1648,11 @@ mjit::Compiler::jsop_stricteq(JSOp op)
masm.set32(cond, frame.tempRegForType(test), Imm32(mask), result);
#elif defined JS_CPU_X64
RegisterID maskReg = frame.allocReg();
frame.pinReg(maskReg);
masm.move(ImmTag(known->getKnownTag()), maskReg);
RegisterID r = frame.tempRegForType(test);
masm.setPtr(cond, r, maskReg, result);
frame.unpinReg(maskReg);
frame.freeReg(maskReg);
#endif
frame.popn(2);

View file

@ -195,6 +195,7 @@ class FrameEntry
FrameEntry *copyOf() const {
JS_ASSERT(isCopy());
JS_ASSERT(copy < this);
return copy;
}

View file

@ -126,20 +126,6 @@ FrameState::allocReg(FrameEntry *fe, RematInfo::RematType type)
return reg;
}
inline void
FrameState::emitLoadTypeTag(FrameEntry *fe, RegisterID reg) const
{
emitLoadTypeTag(this->masm, fe, reg);
}
inline void
FrameState::emitLoadTypeTag(Assembler &masm, FrameEntry *fe, RegisterID reg) const
{
if (fe->isCopy())
fe = fe->copyOf();
masm.loadTypeTag(addressOf(fe), reg);
}
inline void
FrameState::convertInt32ToDouble(Assembler &masm, FrameEntry *fe, FPRegisterID fpreg) const
{
@ -485,33 +471,188 @@ FrameState::shouldAvoidDataRemat(FrameEntry *fe)
}
inline void
FrameState::syncType(const FrameEntry *fe, Address to, Assembler &masm) const
FrameState::ensureFeSynced(const FrameEntry *fe, Assembler &masm) const
{
JS_ASSERT_IF(fe->type.synced(),
fe->isCopied() && addressOf(fe).offset != to.offset);
JS_ASSERT(fe->type.inRegister() || fe->type.isConstant());
Address to = addressOf(fe);
const FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
/* Store a double's type bits, even though !isTypeKnown(). */
if (fe->isConstant())
masm.storeTypeTag(ImmTag(fe->getKnownTag()), to);
else if (fe->isTypeKnown())
masm.storeTypeTag(ImmType(fe->getKnownType()), to);
else
masm.storeTypeTag(fe->type.reg(), to);
#if defined JS_PUNBOX64
/* If we can, sync the type and data in one go. */
if (!fe->data.synced() && !fe->type.synced()) {
if (backing->isConstant())
masm.storeValue(backing->getValue(), to);
else if (backing->isTypeKnown())
masm.storeValueFromComponents(ImmType(backing->getKnownType()), backing->data.reg(), to);
else
masm.storeValueFromComponents(backing->type.reg(), backing->data.reg(), to);
return;
}
#endif
/*
* On x86_64, only one of the following two calls will have output,
* and a load will only occur if necessary.
*/
ensureDataSynced(fe, masm);
ensureTypeSynced(fe, masm);
}
inline void
FrameState::syncData(const FrameEntry *fe, Address to, Assembler &masm) const
FrameState::ensureTypeSynced(const FrameEntry *fe, Assembler &masm) const
{
JS_ASSERT_IF(addressOf(fe).base == to.base &&
addressOf(fe).offset == to.offset,
!fe->data.synced());
JS_ASSERT(fe->data.inRegister() || fe->data.isConstant());
if (fe->type.synced())
return;
if (fe->data.isConstant())
masm.storePayload(ImmPayload(fe->getPayload()), to);
Address to = addressOf(fe);
const FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
#if defined JS_PUNBOX64
/* Attempt to store the entire Value, to prevent a load. */
if (backing->isConstant()) {
masm.storeValue(backing->getValue(), to);
return;
}
if (backing->data.inRegister()) {
RegisterID dreg = backing->data.reg();
if (backing->isTypeKnown())
masm.storeValueFromComponents(ImmType(backing->getKnownType()), dreg, to);
else
masm.storeValueFromComponents(backing->type.reg(), dreg, to);
return;
}
#endif
/* Store a double's type bits, even though !isTypeKnown(). */
if (backing->isConstant())
masm.storeTypeTag(ImmTag(backing->getKnownTag()), to);
else if (backing->isTypeKnown())
masm.storeTypeTag(ImmType(backing->getKnownType()), to);
else
masm.storePayload(fe->data.reg(), to);
masm.storeTypeTag(backing->type.reg(), to);
}
inline void
FrameState::ensureDataSynced(const FrameEntry *fe, Assembler &masm) const
{
if (fe->data.synced())
return;
Address to = addressOf(fe);
const FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
#if defined JS_PUNBOX64
if (backing->isConstant())
masm.storeValue(backing->getValue(), to);
else if (backing->isTypeKnown())
masm.storeValueFromComponents(ImmType(backing->getKnownType()), backing->data.reg(), to);
else if (backing->type.inRegister())
masm.storeValueFromComponents(backing->type.reg(), backing->data.reg(), to);
else
masm.storePayload(backing->data.reg(), to);
#elif defined JS_NUNBOX32
if (backing->isConstant())
masm.storePayload(ImmPayload(backing->getPayload()), to);
else
masm.storePayload(backing->data.reg(), to);
#endif
}
inline void
FrameState::syncFe(FrameEntry *fe)
{
FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
bool needTypeReg = !fe->type.synced() && backing->type.inMemory();
bool needDataReg = !fe->data.synced() && backing->data.inMemory();
#if defined JS_NUNBOX32
/* Determine an ordering that won't spill known regs. */
if (needTypeReg && !needDataReg) {
syncData(fe);
syncType(fe);
} else {
syncType(fe);
syncData(fe);
}
#elif defined JS_PUNBOX64
if (JS_UNLIKELY(needTypeReg && needDataReg)) {
/* Memory-to-memory moves can only occur for copies backed by memory. */
JS_ASSERT(backing != fe);
/* Use ValueReg to do a whole-Value mem-to-mem move. */
masm.loadValue(addressOf(backing), Registers::ValueReg);
masm.storeValue(Registers::ValueReg, addressOf(fe));
} else {
/* Store in case unpinning is necessary. */
MaybeRegisterID pairReg;
/* Get a register if necessary, without clobbering its pair. */
if (needTypeReg) {
if (backing->data.inRegister()) {
pairReg = backing->data.reg();
pinReg(backing->data.reg());
}
tempRegForType(backing);
} else if (needDataReg) {
if (backing->type.inRegister()) {
pairReg = backing->type.reg();
pinReg(backing->type.reg());
}
tempRegForData(backing);
}
ensureFeSynced(fe, masm);
if (pairReg.isSet())
unpinReg(pairReg.reg());
}
if (!fe->type.synced())
fe->type.sync();
if (!fe->data.synced())
fe->data.sync();
#endif
}
inline void
FrameState::syncType(FrameEntry *fe)
{
FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
if (!fe->type.synced() && backing->type.inMemory())
tempRegForType(backing);
ensureTypeSynced(fe, masm);
if (!fe->type.synced())
fe->type.sync();
}
inline void
FrameState::syncData(FrameEntry *fe)
{
FrameEntry *backing = fe;
if (fe->isCopy())
backing = fe->copyOf();
if (!fe->data.synced() && backing->data.inMemory())
tempRegForData(backing);
ensureDataSynced(fe, masm);
if (!fe->data.synced())
fe->data.sync();
}
inline void
@ -525,7 +666,17 @@ FrameState::forgetType(FrameEntry *fe)
if (!fe->isTypeKnown())
return;
syncType(fe, addressOf(fe), masm);
/*
* Likewise, storeLocal() may have set this FE, with a known type,
* to be a copy of another FE, which has an unknown type.
* Just forget the type, since the backing is used in all cases.
*/
if (fe->isCopy()) {
fe->type.invalidate();
return;
}
ensureTypeSynced(fe, masm);
fe->type.setMemory();
}
@ -852,11 +1003,7 @@ FrameState::loadDouble(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) cons
return;
}
if (!fe->data.synced())
syncData(fe, addressOf(fe), masm);
if (!fe->type.synced())
syncType(fe, addressOf(fe), masm);
ensureFeSynced(fe, masm);
masm.loadDouble(addressOf(fe), fpReg);
}
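A hedged sketch of the idea behind ensureFeSynced() on the 64-bit value representation (constants and names are illustrative): when neither half of an entry is synced, the shifted type tag and the payload can be combined and written with a single 64-bit store, whereas syncing only one half has to preserve the other half already in memory.
// Standalone illustration; kTagShift and the slot layout are assumptions.
#include <cstdint>
static const int      kTagShift    = 47;
static const uint64_t kPayloadMask = (uint64_t(1) << kTagShift) - 1;
// One store syncs both halves of the value (the PUNBOX64 fast path).
static void storeValueFromComponents(uint64_t shiftedTag, uint64_t payload,
                                     uint64_t *slot)
{
    *slot = shiftedTag | (payload & kPayloadMask);
}
// Syncing only the tag must keep the payload already in memory intact.
static void storeTypeTag(uint64_t shiftedTag, uint64_t *slot)
{
    *slot = shiftedTag | (*slot & kPayloadMask);
}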

View file

@ -47,7 +47,10 @@ using namespace js::mjit;
JS_STATIC_ASSERT(sizeof(FrameEntry) % 8 == 0);
FrameState::FrameState(JSContext *cx, JSScript *script, Assembler &masm)
: cx(cx), script(script), masm(masm), entries(NULL), reifier(cx, *this),
: cx(cx), script(script), masm(masm), entries(NULL),
#if defined JS_NUNBOX32
reifier(cx, *this),
#endif
inTryBlock(false)
{
}
@ -78,8 +81,10 @@ FrameState::init(uint32 nargs)
if (!cursor)
return false;
#if defined JS_NUNBOX32
if (!reifier.init(nslots))
return false;
#endif
entries = (FrameEntry *)cursor;
cursor += sizeof(FrameEntry) * nslots;
@ -121,16 +126,10 @@ FrameState::evictReg(RegisterID reg)
FrameEntry *fe = regstate[reg].fe();
if (regstate[reg].type() == RematInfo::TYPE) {
if (!fe->type.synced()) {
syncType(fe, addressOf(fe), masm);
fe->type.sync();
}
ensureTypeSynced(fe, masm);
fe->type.setMemory();
} else {
if (!fe->data.synced()) {
syncData(fe, addressOf(fe), masm);
fe->data.sync();
}
ensureDataSynced(fe, masm);
fe->data.setMemory();
}
}
@ -420,6 +419,7 @@ FrameState::assertValidRegisterState() const
}
#endif
#if defined JS_NUNBOX32
void
FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
FrameEntry *bottom) const
@ -433,6 +433,7 @@ FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
reifier.sync(fe);
}
}
#endif
void
FrameState::sync(Assembler &masm, Uses uses) const
@ -441,23 +442,34 @@ FrameState::sync(Assembler &masm, Uses uses) const
return;
/* Sync all registers up-front. */
for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
RegisterID reg = RegisterID(i);
Registers allRegs(Registers::AvailRegs);
while (!allRegs.empty()) {
RegisterID reg = allRegs.takeAnyReg();
FrameEntry *fe = regstate[reg].usedBy();
if (!fe)
continue;
JS_ASSERT(fe->isTracked());
#if defined JS_PUNBOX64
/* Sync entire FE to prevent loads. */
ensureFeSynced(fe, masm);
/* Take the other register in the pair, if one exists. */
if (regstate[reg].type() == RematInfo::DATA && fe->type.inRegister())
allRegs.takeReg(fe->type.reg());
else if (regstate[reg].type() == RematInfo::TYPE && fe->data.inRegister())
allRegs.takeReg(fe->data.reg());
#elif defined JS_NUNBOX32
/* Sync register if unsynced. */
if (regstate[reg].type() == RematInfo::DATA) {
JS_ASSERT(fe->data.reg() == reg);
if (!fe->data.synced())
syncData(fe, addressOf(fe), masm);
ensureDataSynced(fe, masm);
} else {
JS_ASSERT(fe->type.reg() == reg);
if (!fe->type.synced())
syncType(fe, addressOf(fe), masm);
ensureTypeSynced(fe, masm);
}
#endif
}
/*
@ -473,51 +485,63 @@ FrameState::sync(Assembler &masm, Uses uses) const
if (!fe->isTracked())
continue;
Address address = addressOf(fe);
FrameEntry *backing = fe;
if (!fe->isCopy()) {
/*
* If this |fe| has registers, track them as available. They've
* already been synced. Otherwise, see if a constant needs to be
* synced.
*/
if (fe->data.inRegister())
avail.putReg(fe->data.reg());
else if (!fe->data.synced())
syncData(fe, address, masm);
if (fe->type.inRegister())
avail.putReg(fe->type.reg());
else if (!fe->type.synced())
syncType(fe, address, masm);
} else {
FrameEntry *backing = fe->copyOf();
JS_ASSERT(backing != fe);
backing = fe->copyOf();
JS_ASSERT(!backing->isConstant() && !fe->isConstant());
/*
* If the copy is backed by something not in a register, fall back
* to a slower sync algorithm.
*/
if ((!fe->type.synced() && !backing->type.inRegister()) ||
(!fe->data.synced() && !backing->data.inRegister())) {
#if defined JS_PUNBOX64
if ((!fe->type.synced() && backing->type.inMemory()) ||
(!fe->data.synced() && backing->data.inMemory())) {
RegisterID syncReg = Registers::ValueReg;
/* Load the entire Value into syncReg. */
if (backing->type.synced() && backing->data.synced()) {
masm.loadValue(addressOf(backing), syncReg);
} else if (backing->type.inMemory()) {
masm.loadTypeTag(addressOf(backing), syncReg);
masm.orPtr(backing->data.reg(), syncReg);
} else {
JS_ASSERT(backing->data.inMemory());
masm.loadPayload(addressOf(backing), syncReg);
if (backing->isTypeKnown())
masm.orPtr(ImmType(backing->getKnownType()), syncReg);
else
masm.orPtr(backing->type.reg(), syncReg);
}
masm.storeValue(syncReg, addressOf(fe));
continue;
}
#elif defined JS_NUNBOX32
/* Fall back to a slower sync algorithm if load required. */
if ((!fe->type.synced() && backing->type.inMemory()) ||
(!fe->data.synced() && backing->data.inMemory())) {
syncFancy(masm, avail, fe, bottom);
return;
}
if (!fe->type.synced()) {
/* :TODO: we can do better, the type is learned for all copies. */
if (fe->isTypeKnown()) {
//JS_ASSERT(fe->getTypeTag() == backing->getTypeTag());
masm.storeTypeTag(ImmType(fe->getKnownType()), address);
} else {
masm.storeTypeTag(backing->type.reg(), address);
}
}
if (!fe->data.synced())
masm.storePayload(backing->data.reg(), address);
#endif
}
/* If a part still needs syncing, it is either a copy or constant. */
#if defined JS_PUNBOX64
/* All register-backed FEs have been entirely synced up-front. */
if (!fe->type.inRegister() && !fe->data.inRegister())
ensureFeSynced(fe, masm);
#elif defined JS_NUNBOX32
/* All components held in registers have been already synced. */
if (!fe->data.inRegister())
ensureDataSynced(fe, masm);
if (!fe->type.inRegister())
ensureTypeSynced(fe, masm);
#endif
}
}
@ -536,19 +560,35 @@ FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
JS_ASSERT(fe->isTracked());
#if defined JS_PUNBOX64
/* Don't use syncFe(), since that may clobber more registers. */
ensureFeSynced(fe, masm);
if (!fe->type.synced())
fe->type.sync();
if (!fe->data.synced())
fe->data.sync();
/* Take the other register in the pair, if one exists. */
if (regstate[reg].type() == RematInfo::DATA) {
JS_ASSERT(fe->data.reg() == reg);
if (!fe->data.synced()) {
syncData(fe, addressOf(fe), masm);
fe->data.sync();
}
if (fe->type.inRegister() && search.hasReg(fe->type.reg()))
search.takeReg(fe->type.reg());
} else {
JS_ASSERT(fe->type.reg() == reg);
if (!fe->type.synced()) {
syncType(fe, addressOf(fe), masm);
fe->type.sync();
}
if (fe->data.inRegister() && search.hasReg(fe->data.reg()))
search.takeReg(fe->data.reg());
}
#elif defined JS_NUNBOX32
/* Sync this register. */
if (regstate[reg].type() == RematInfo::DATA) {
JS_ASSERT(fe->data.reg() == reg);
syncData(fe);
} else {
JS_ASSERT(fe->type.reg() == reg);
syncType(fe);
}
#endif
}
uint32 maxvisits = tracker.nentries;
@ -563,31 +603,18 @@ FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
if (fe >= spStop)
continue;
Address address = addressOf(fe);
FrameEntry *backing = fe;
syncFe(fe);
if (fe->isCopy())
backing = fe->copyOf();
if (!fe->data.synced()) {
if (backing != fe && backing->data.inMemory())
tempRegForData(backing);
syncData(backing, address, masm);
fe->data.sync();
if (fe->data.inRegister() && kill.hasReg(fe->data.reg())) {
forgetReg(fe->data.reg());
fe->data.setMemory();
}
/* Forget registers. */
if (fe->data.inRegister() && kill.hasReg(fe->data.reg()) &&
!regstate[fe->data.reg()].isPinned()) {
forgetReg(fe->data.reg());
fe->data.setMemory();
}
if (!fe->type.synced()) {
if (backing != fe && backing->type.inMemory())
tempRegForType(backing);
syncType(backing, address, masm);
fe->type.sync();
if (fe->type.inRegister() && kill.hasReg(fe->type.reg())) {
forgetReg(fe->type.reg());
fe->type.setMemory();
}
if (fe->type.inRegister() && kill.hasReg(fe->type.reg()) &&
!regstate[fe->type.reg()].isPinned()) {
forgetReg(fe->type.reg());
fe->type.setMemory();
}
}
@ -669,8 +696,7 @@ FrameState::copyDataIntoReg(FrameEntry *fe, RegisterID hint)
RegisterID reg = fe->data.reg();
if (reg == hint) {
if (freeRegs.empty()) {
if (!fe->data.synced())
syncData(fe, addressOf(fe), masm);
ensureDataSynced(fe, masm);
fe->data.setMemory();
} else {
reg = allocReg();
@ -698,8 +724,7 @@ FrameState::copyDataIntoReg(Assembler &masm, FrameEntry *fe)
if (fe->data.inRegister()) {
RegisterID reg = fe->data.reg();
if (freeRegs.empty()) {
if (!fe->data.synced())
syncData(fe, addressOf(fe), masm);
ensureDataSynced(fe, masm);
fe->data.setMemory();
regstate[reg].forget();
} else {
@ -731,8 +756,7 @@ FrameState::copyTypeIntoReg(FrameEntry *fe)
if (fe->type.inRegister()) {
RegisterID reg = fe->type.reg();
if (freeRegs.empty()) {
if (!fe->type.synced())
syncType(fe, addressOf(fe), masm);
ensureTypeSynced(fe, masm);
fe->type.setMemory();
regstate[reg].forget();
} else {
@ -784,13 +808,9 @@ FrameState::copyEntryIntoFPReg(Assembler &masm, FrameEntry *fe, FPRegisterID fpr
if (fe->isCopy())
fe = fe->copyOf();
/* The entry must be synced to memory. */
if (!fe->data.synced())
syncData(fe, addressOf(fe), masm);
if (!fe->type.synced())
syncType(fe, addressOf(fe), masm);
ensureFeSynced(fe, masm);
masm.loadDouble(addressOf(fe), fpreg);
return fpreg;
}
@ -810,8 +830,7 @@ FrameState::ownRegForType(FrameEntry *fe)
if (freeRegs.empty()) {
/* For now... just steal the register that already exists. */
if (!backing->type.synced())
syncType(backing, addressOf(backing), masm);
ensureTypeSynced(backing, masm);
reg = backing->type.reg();
backing->type.setMemory();
regstate[reg].forget();
@ -854,8 +873,7 @@ FrameState::ownRegForData(FrameEntry *fe)
if (freeRegs.empty()) {
/* For now... just steal the register that already exists. */
if (!backing->data.synced())
syncData(backing, addressOf(backing), masm);
ensureDataSynced(backing, masm);
reg = backing->data.reg();
backing->data.setMemory();
regstate[reg].forget();
@ -1106,33 +1124,14 @@ FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
return;
/* Ensure that the local variable remains synced. */
if (local->isCopy()) {
FrameEntry *backing = local->copyOf();
if (!local->data.synced()) {
if (backing->data.inMemory())
tempRegForData(backing);
syncData(backing, addressOf(local), masm);
}
if (!local->type.synced()) {
if (backing->type.inMemory())
tempRegForType(backing);
syncType(backing, addressOf(local), masm);
}
} else {
if (!local->data.synced()) {
syncData(local, addressOf(local), masm);
local->data.sync();
}
if (!local->type.synced()) {
syncType(local, addressOf(local), masm);
local->type.sync();
}
if (closed)
forgetEntry(local);
}
syncFe(local);
if (closed)
if (closed) {
/* If the FE can have registers, free them before resetting. */
if (!local->isCopy())
forgetEntry(local);
local->resetSynced();
}
}
void
@ -1336,7 +1335,7 @@ FrameState::unpinEntry(const ValueRemat &vr)
}
void
FrameState::syncEntry(Assembler &masm, FrameEntry *fe, const ValueRemat &vr)
FrameState::ensureValueSynced(Assembler &masm, FrameEntry *fe, const ValueRemat &vr)
{
#if defined JS_PUNBOX64
if (!vr.isDataSynced || !vr.isTypeSynced)

View file

@ -408,13 +408,6 @@ class FrameState
*/
inline RegisterID tempRegForData(FrameEntry *fe, RegisterID reg, Assembler &masm) const;
/*
* Forcibly loads the type tag for the specified FrameEntry
* into a register already marked as owning the type.
*/
inline void emitLoadTypeTag(FrameEntry *fe, RegisterID reg) const;
inline void emitLoadTypeTag(Assembler &masm, FrameEntry *fe, RegisterID reg) const;
/*
* Convert an integer to a double without applying
* additional Register pressure.
@ -490,7 +483,7 @@ class FrameState
void unpinEntry(const ValueRemat &vr);
/* Syncs fe to memory, given its state as constructed by a call to pinEntry. */
void syncEntry(Assembler &masm, FrameEntry *fe, const ValueRemat &vr);
void ensureValueSynced(Assembler &masm, FrameEntry *fe, const ValueRemat &vr);
struct BinaryAlloc {
MaybeRegisterID lhsType;
@ -798,15 +791,26 @@ class FrameState
void evictReg(RegisterID reg);
inline FrameEntry *rawPush();
inline void addToTracker(FrameEntry *fe);
inline void syncType(const FrameEntry *fe, Address to, Assembler &masm) const;
inline void syncData(const FrameEntry *fe, Address to, Assembler &masm) const;
/* Guarantee sync, but do not set any sync flag. */
inline void ensureFeSynced(const FrameEntry *fe, Assembler &masm) const;
inline void ensureTypeSynced(const FrameEntry *fe, Assembler &masm) const;
inline void ensureDataSynced(const FrameEntry *fe, Assembler &masm) const;
/* Guarantee sync, even if register allocation is required, and set sync. */
inline void syncFe(FrameEntry *fe);
inline void syncType(FrameEntry *fe);
inline void syncData(FrameEntry *fe);
inline FrameEntry *getLocal(uint32 slot);
inline void forgetAllRegs(FrameEntry *fe);
inline void swapInTracker(FrameEntry *lhs, FrameEntry *rhs);
inline uint32 localIndex(uint32 n);
void pushCopyOf(uint32 index);
#if defined JS_NUNBOX32
void syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
FrameEntry *bottom) const;
#endif
inline bool tryFastDoubleLoad(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) const;
void resetInternalState();
@ -880,7 +884,9 @@ class FrameState
*/
RegisterState regstate[Assembler::TotalRegisters];
#if defined JS_NUNBOX32
mutable ImmutableSync reifier;
#endif
JSPackedBool *closedVars;
bool eval;

View file

@ -36,6 +36,9 @@
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#if defined JS_NUNBOX32
#include "FrameEntry.h"
#include "FrameState.h"
#include "FrameState-inl.h"
@ -270,3 +273,5 @@ ImmutableSync::syncNormal(FrameEntry *fe)
}
}
#endif /* JS_NUNBOX32 */

View file

@ -37,7 +37,7 @@
*
* ***** END LICENSE BLOCK ***** */
#if !defined jsjaeger_imm_sync_h__ && defined JS_METHODJIT
#if !defined jsjaeger_imm_sync_h__ && defined JS_METHODJIT && defined JS_NUNBOX32
#define jsjaeger_imm_sync_h__
#include "methodjit/MachineRegs.h"

View file

@ -585,6 +585,8 @@ class SetPropCompiler : public PICStubCompiler
/* Adding a property to the object. */
if (obj->isDelegate())
return disable("delegate");
if (!obj->isExtensible())
return disable("not extensible");
if (clasp->addProperty != PropertyStub)
return disable("add property hook");
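For context, this is the guard exercised by the new add-property-non-extensible.js test added below: a stub that adds a property must never be generated for a non-extensible object, so the compiler disables the PIC and leaves that case to the generic path. A minimal sketch of the check (toy types, not the real JSObject API):
// Illustration only.
struct Obj {
    bool extensibleFlag;
    bool isExtensible() const { return extensibleFlag; }
};
static bool canSpecializeAddProperty(const Obj &obj)
{
    if (!obj.isExtensible())
        return false;   // take the slow path; the add must silently fail
    return true;
}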

View file

@ -132,8 +132,8 @@ class Assembler : public BaseAssembler
}
void loadValueAsComponents(const Value &val, RegisterID type, RegisterID payload) {
move(Imm64(val.asRawBits() & 0xFFFF800000000000), type);
move(Imm64(val.asRawBits() & 0x00007FFFFFFFFFFF), payload);
move(Imm64(val.asRawBits() & JSVAL_TAG_MASK), type);
move(Imm64(val.asRawBits() & JSVAL_PAYLOAD_MASK), payload);
}
template <typename T>
@ -158,8 +158,7 @@ class Assembler : public BaseAssembler
template <typename T>
void storeTypeTag(ImmTag imm, T address) {
loadValue(address, Registers::ValueReg);
convertValueToPayload(Registers::ValueReg);
loadPayload(address, Registers::ValueReg);
orPtr(imm, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
@ -167,8 +166,7 @@ class Assembler : public BaseAssembler
template <typename T>
void storeTypeTag(RegisterID reg, T address) {
/* The type tag must be stored in shifted format. */
loadValue(address, Registers::ValueReg);
convertValueToPayload(Registers::ValueReg);
loadPayload(address, Registers::ValueReg);
orPtr(reg, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
@ -182,8 +180,7 @@ class Assembler : public BaseAssembler
template <typename T>
void storePayload(RegisterID reg, T address) {
/* Not for doubles. */
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
loadTypeTag(address, Registers::ValueReg);
orPtr(reg, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
@ -233,7 +230,7 @@ class Assembler : public BaseAssembler
Jump testNull(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, Imm64(JSVAL_BITS(JSVAL_NULL)));
return testNull(cond, Registers::ValueReg);
}
Jump testUndefined(Assembler::Condition cond, RegisterID reg) {
@ -242,7 +239,7 @@ class Assembler : public BaseAssembler
Jump testUndefined(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, Imm64(JSVAL_BITS(JSVAL_VOID)));
return testUndefined(cond, Registers::ValueReg);
}
Jump testInt32(Assembler::Condition cond, RegisterID reg) {
@ -250,63 +247,50 @@ class Assembler : public BaseAssembler
}
Jump testInt32(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_INT32));
loadTypeTag(address, Registers::ValueReg);
return testInt32(cond, Registers::ValueReg);
}
Jump testNumber(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_INT32));
cond = (cond == Assembler::Equal) ? Assembler::Below : Assembler::AboveOrEqual;
return branchPtr(cond, reg,
ImmTag(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_NUMBER_SET));
}
Jump testNumber(Assembler::Condition cond, Address address) {
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_INT32));
return testNumber(cond, Registers::ValueReg);
}
Jump testPrimitive(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::NotEqual) ? Assembler::AboveOrEqual : Assembler::Below;
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_OBJECT));
cond = (cond == Assembler::Equal) ? Assembler::Below : Assembler::AboveOrEqual;
return branchPtr(cond, reg,
ImmTag(JSVAL_UPPER_EXCL_SHIFTED_TAG_OF_PRIMITIVE_SET));
}
Jump testPrimitive(Assembler::Condition cond, Address address) {
cond = (cond == Assembler::NotEqual) ? Assembler::AboveOrEqual : Assembler::Below;
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_OBJECT));
return testPrimitive(cond, Registers::ValueReg);
}
Jump testObject(Assembler::Condition cond, RegisterID reg) {
cond = (cond == Assembler::Equal) ? Assembler::AboveOrEqual : Assembler::Below;
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_OBJECT));
}
Jump testObject(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_OBJECT));
return testObject(cond, Registers::ValueReg);
}
Jump testDouble(Assembler::Condition cond, RegisterID reg) {
Assembler::Condition opcond;
if (cond == Assembler::Equal)
opcond = Assembler::Below;
else
opcond = Assembler::AboveOrEqual;
return branchPtr(opcond, reg, ImmTag(JSVAL_SHIFTED_TAG_MAX_DOUBLE));
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
return branchPtr(cond, reg, ImmTag(JSVAL_SHIFTED_TAG_MAX_DOUBLE));
}
Jump testDouble(Assembler::Condition cond, Address address) {
Assembler::Condition opcond;
if (cond == Assembler::Equal)
opcond = Assembler::Below;
else
opcond = Assembler::AboveOrEqual;
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(opcond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_MAX_DOUBLE));
return testDouble(cond, Registers::ValueReg);
}
Jump testBoolean(Assembler::Condition cond, RegisterID reg) {
@ -314,9 +298,8 @@ class Assembler : public BaseAssembler
}
Jump testBoolean(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_BOOLEAN));
loadTypeTag(address, Registers::ValueReg);
return testBoolean(cond, Registers::ValueReg);
}
Jump testString(Assembler::Condition cond, RegisterID reg) {
@ -324,9 +307,8 @@ class Assembler : public BaseAssembler
}
Jump testString(Assembler::Condition cond, Address address) {
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
return branchPtr(cond, Registers::ValueReg, ImmTag(JSVAL_SHIFTED_TAG_BOOLEAN));
loadTypeTag(address, Registers::ValueReg);
return testString(cond, Registers::ValueReg);
}
};
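A hedged sketch of the tag-range classification the rewritten testNumber/testPrimitive helpers rely on (the tag values below are made up; only their ordering matters): with doubles lowest, then int32, then the other primitives, and objects highest, each class test collapses to one unsigned comparison against an exclusive upper bound.
// Standalone illustration of range-based tag tests.
#include <cstdint>
enum : uint64_t {
    kTagMaxDouble = 10,   // every tag <= this encodes a double
    kTagInt32     = 11,
    kTagUndefined = 12,
    kTagBoolean   = 13,
    kTagString    = 14,
    kTagObject    = 15,
    kUpperExclNumber    = kTagInt32 + 1,  // doubles and int32 only
    kUpperExclPrimitive = kTagObject      // everything below object
};
static bool isDouble(uint64_t tag)    { return tag <= kTagMaxDouble; }
static bool isNumber(uint64_t tag)    { return tag <  kUpperExclNumber; }
static bool isPrimitive(uint64_t tag) { return tag <  kUpperExclPrimitive; }
static bool isObject(uint64_t tag)    { return tag >= kTagObject; }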

View file

@ -143,8 +143,6 @@ stubs::SetName(VMFrame &f, JSAtom *origAtom)
JSObject *obj2;
JSAtom *atom;
if (cache->testForSet(cx, f.regs.pc, obj, &entry, &obj2, &atom)) {
JS_ASSERT(obj->isExtensible());
/*
* Fast property cache hit, only partially confirmed by
* testForSet. We know that the entry applies to regs.pc and
@ -184,6 +182,8 @@ stubs::SetName(VMFrame &f, JSAtom *origAtom)
break;
}
} else {
JS_ASSERT(obj->isExtensible());
if (obj->nativeEmpty()) {
/*
* We check that cx owns obj here and will continue to own

View file

@ -41,7 +41,6 @@ DEPTH = ..
topsrcdir = @top_srcdir@
srcdir = @srcdir@
VPATH = @srcdir@
narcissusdir = $(topsrcdir)/../narcissus
include $(DEPTH)/config/autoconf.mk
@ -50,7 +49,6 @@ CPPSRCS = \
js.cpp \
jsworkers.cpp \
$(NULL)
NJS = njs
DEFINES += -DEXPORT_JS_API
@ -79,10 +77,5 @@ LDFLAGS += -F/System/Library/PrivateFrameworks -framework CHUD
endif
# People expect the js shell to wind up in the top-level JS dir.
# The njs script expects to be in the same directory as the js shell as well as
# narcissus/js*.js.
libs::
$(INSTALL) $(IFLAGS2) $(PROGRAM) $(DEPTH)
$(NSINSTALL) -D $(DEPTH)/narcissus
$(INSTALL) $(IFLAGS2) $(foreach f,$(wildcard $(narcissusdir)/js*.js),"$f") $(DEPTH)/narcissus
$(INSTALL) $(IFLAGS2) $(srcdir)/$(NJS) $(DEPTH)

View file

@ -4072,38 +4072,6 @@ Snarf(JSContext *cx, uintN argc, jsval *vp)
return JS_TRUE;
}
static JSBool
Snarl(JSContext *cx, uintN argc, jsval *vp)
{
if (argc < 1) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_MORE_ARGS_NEEDED,
"compile", "0", "s");
}
JSObject *thisobj = JS_THIS_OBJECT(cx, vp);
if (!thisobj)
return JS_FALSE;
jsval arg0 = JS_ARGV(cx, vp)[0];
if (!JSVAL_IS_STRING(arg0)) {
const char *typeName = JS_GetTypeName(cx, JS_TypeOfValue(cx, arg0));
JS_ReportError(cx, "expected string to compile, got %s", typeName);
return JS_FALSE;
}
JSString *scriptContents = JSVAL_TO_STRING(arg0);
JSScript *script = JS_CompileUCScript(cx, NULL, JS_GetStringCharsZ(cx, scriptContents),
JS_GetStringLength(scriptContents), "<string>", 0);
if (!script)
return JS_FALSE;
JS_ExecuteScript(cx, thisobj, script, NULL);
JS_DestroyScript(cx, script);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JSBool
Wrap(JSContext *cx, uintN argc, jsval *vp)
{
@ -4254,7 +4222,6 @@ static JSFunctionSpec shell_functions[] = {
JS_FN("scatter", Scatter, 1,0),
#endif
JS_FN("snarf", Snarf, 0,0),
JS_FN("snarl", Snarl, 0,0),
JS_FN("read", Snarf, 0,0),
JS_FN("compile", Compile, 1,0),
JS_FN("parse", Parse, 1,0),
@ -4380,7 +4347,6 @@ static const char *const shell_help_messages[] = {
"scatter(fns) Call functions concurrently (ignoring errors)",
#endif
"snarf(filename) Read filename into returned string",
"snarl(codestring) Eval code, without being eval.",
"read(filename) Synonym for snarf",
"compile(code) Compiles a string to bytecode, potentially throwing",
"parse(code) Parses a string, potentially throwing",

View file

@ -1,67 +0,0 @@
#!/usr/bin/python
#
# Narcissus 'shell' for use with jstests.py
# Expects to be in the same directory as ./js
# Expects the Narcissus src files to be in ./narcissus/
import os, re, sys, signal
from subprocess import *
from optparse import OptionParser
THIS_DIR = os.path.dirname(__file__)
NARC_JS_DIR = os.path.abspath(os.path.join(THIS_DIR, 'narcissus'))
js_cmd = os.path.abspath(os.path.join(THIS_DIR, "js"))
narc_jsdefs = os.path.join(NARC_JS_DIR, "jsdefs.js")
narc_jslex = os.path.join(NARC_JS_DIR, "jslex.js")
narc_jsparse = os.path.join(NARC_JS_DIR, "jsparse.js")
narc_jsssa = os.path.join(NARC_JS_DIR, "jsssa.js")
narc_jsexec = os.path.join(NARC_JS_DIR, "jsexec.js")
def handler(signum, frame):
print ''
# the exit code produced by ./js on SIGINT
sys.exit(130)
signal.signal(signal.SIGINT, handler)
if __name__ == '__main__':
op = OptionParser(usage='%prog [TEST-SPECS]')
op.add_option('-f', '--file', dest='js_files', action='append',
help='JS file to load', metavar='FILE')
op.add_option('-e', '--expression', dest='js_exps', action='append',
help='JS expression to evaluate')
op.add_option('-i', '--interactive', dest='js_interactive', action='store_true',
help='enable interactive shell')
op.add_option('-H', '--harmony', dest='js_harmony', action='store_true',
help='enable ECMAScript Harmony mode')
op.add_option('-S', '--ssa', dest='js_ssa', action='store_true',
help='enable parse-time SSA construction')
(options, args) = op.parse_args()
cmd = ""
if options.js_harmony:
cmd += 'Narcissus.options.version = "harmony"; '
if options.js_ssa:
cmd += 'Narcissus.options.builderType = "ssa"; '
if options.js_exps:
for exp in options.js_exps:
cmd += 'Narcissus.interpreter.evaluate("%s"); ' % exp.replace('"', '\\"')
if options.js_files:
for file in options.js_files:
cmd += 'Narcissus.interpreter.evaluate(snarf("%(file)s"), "%(file)s", 1); ' % { 'file':file }
if (not options.js_exps) and (not options.js_files):
options.js_interactive = True
if options.js_interactive:
cmd += 'Narcissus.interpreter.repl();'
Popen([js_cmd, '-f', narc_jsdefs, '-f', narc_jslex, '-f', narc_jsparse, '-f', narc_jsssa, '-f', narc_jsexec, '-e', cmd]).wait()

View file

@ -3,3 +3,4 @@ script parseInt-01.js
script eval-01.js
script eval-02.js
script eval-inside-with-is-direct.js
script parenthesized-eval-is-direct.js

View file

@ -0,0 +1,68 @@
// Any copyright is dedicated to the Public Domain.
// http://creativecommons.org/licenses/publicdomain/
//-----------------------------------------------------------------------------
print("(eval)(...) is a direct eval, (1, eval)() isn't, etc.");
/**************
* BEGIN TEST *
**************/
/*
* Justification:
*
* https://mail.mozilla.org/pipermail/es5-discuss/2010-October/003724.html
*
* Note also bug 537673.
*/
var t = "global";
function group()
{
var t = "local";
return (eval)("t");
}
assertEq(group(), "local");
function groupAndComma()
{
var t = "local";
return (1, eval)("t");
}
assertEq(groupAndComma(), "global");
function groupAndTrueTernary()
{
var t = "local";
return (true ? eval : null)("t");
}
assertEq(groupAndTrueTernary(), "global");
function groupAndEmptyStringTernary()
{
var t = "local";
return ("" ? null : eval)("t");
}
assertEq(groupAndEmptyStringTernary(), "global");
function groupAndZeroTernary()
{
var t = "local";
return (0 ? null : eval)("t");
}
assertEq(groupAndZeroTernary(), "global");
function groupAndNaNTernary()
{
var t = "local";
return (0 / 0 ? null : eval)("t");
}
assertEq(groupAndNaNTernary(), "global");
/******************************************************************************/
if (typeof reportCompare === "function")
reportCompare(true, true);
print("All tests passed!");

View file

@ -0,0 +1,118 @@
/*
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/licenses/publicdomain/
* Contributor:
* Jeff Walden <jwalden+code@mit.edu>
*/
var gTestfile = 'add-property-non-extensible.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 602144;
var summary =
'Properly method-compile attempted addition of properties to ' +
'non-extensible objects';
print(BUGNUMBER + ": " + summary);
/**************
* BEGIN TEST *
**************/
// No property
var o1 = {};
for (var i = 0; i < 5; i++)
o1.a = 3;
assertEq(o1.a, 3);
var o2 = Object.preventExtensions({});
for (var i = 0; i < 5; i++)
o2.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o2, "a"), undefined);
var o3 = Object.seal({});
for (var i = 0; i < 5; i++)
o3.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o3, "a"), undefined);
var o4 = Object.freeze({});
for (var i = 0; i < 5; i++)
o4.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o4, "a"), undefined);
// Has property
var o5 = { a: 2 };
for (var i = 0; i < 5; i++)
o5.a = 3;
assertEq(o5.a, 3);
var o6 = Object.preventExtensions({ a: 2 });
for (var i = 0; i < 5; i++)
o6.a = 3;
assertEq(o6.a, 3);
var o7 = Object.seal({ a: 2 });
for (var i = 0; i < 5; i++)
o7.a = 3;
assertEq(o7.a, 3);
var o8 = Object.freeze({ a: 2 });
for (var i = 0; i < 5; i++)
o8.a = 3;
assertEq(o8.a, 2);
// Extensible, hit up the prototype chain
var o9 = Object.create({ a: 2 });
for (var i = 0; i < 5; i++)
o9.a = 3;
assertEq(o9.a, 3);
var o10 = Object.create(Object.preventExtensions({ a: 2 }));
for (var i = 0; i < 5; i++)
o10.a = 3;
assertEq(o10.a, 3);
var o11 = Object.create(Object.seal({ a: 2 }));
for (var i = 0; i < 5; i++)
o11.a = 3;
assertEq(o11.a, 3);
var o12 = Object.create(Object.freeze({ a: 2 }));
for (var i = 0; i < 5; i++)
o12.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o12, "a"), undefined);
// Not extensible, hit up the prototype chain
var o13 = Object.preventExtensions(Object.create({ a: 2 }));
for (var i = 0; i < 5; i++)
o13.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o13, "a"), undefined);
var o14 =
Object.preventExtensions(Object.create(Object.preventExtensions({ a: 2 })));
for (var i = 0; i < 5; i++)
o14.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o14, "a"), undefined);
var o15 = Object.preventExtensions(Object.create(Object.seal({ a: 2 })));
for (var i = 0; i < 5; i++)
o15.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o15, "a"), undefined);
var o16 = Object.preventExtensions(Object.create(Object.freeze({ a: 2 })));
for (var i = 0; i < 5; i++)
o16.a = 3;
assertEq(Object.getOwnPropertyDescriptor(o16, "a"), undefined);
/******************************************************************************/
reportCompare(true, true);
print("All tests passed!");

View file

@ -39,3 +39,4 @@ script 15.2.3.6-define-over-method.js
script mutation-prevention-methods.js
script object-toString-01.js
script vacuous-accessor-unqualified-name.js
script add-property-non-extensible.js

View file

@ -20,4 +20,4 @@ skip-if(!xulRuntime.shell) script clone-regexp.js
skip-if(!xulRuntime.shell) script clone-object.js
skip-if(!xulRuntime.shell) script clone-typed-array.js
skip-if(!xulRuntime.shell) script clone-errors.js
skip-if(!xulRuntime.shell) script set-property-non-extensible.js # methodjit mis-implements extensibility checking, bug 602441
script set-property-non-extensible.js

View file

@ -12,17 +12,9 @@ print(BUGNUMBER + ": " + summary);
* BEGIN TEST *
**************/
if (typeof options === "function" &&
options().split(",").indexOf("methodjit") < 0)
{
var o = Object.freeze({});
for (var i = 0; i < 10; i++)
print(o.u = "");
}
else
{
print("non-extensible+loop adding property disabled, bug 602441");
}
var o = Object.freeze({});
for (var i = 0; i < 10; i++)
print(o.u = "");
Object.freeze(this);
for (let j = 0; j < 10; j++)

File diff suppressed because it is too large. Load diff

View file

@ -753,8 +753,7 @@ nsJSCID::CreateInstance(nsISupports **_retval)
nsIXPCSecurityManager::HOOK_CREATE_INSTANCE);
if(sm && NS_FAILED(sm->CanCreateInstance(cx, mDetails.ID())))
{
NS_ASSERTION(JS_IsExceptionPending(cx),
"security manager vetoed CreateInstance without setting exception");
NS_ERROR("how are we not being called from chrome here?");
return NS_OK;
}
@ -775,13 +774,9 @@ nsJSCID::CreateInstance(nsISupports **_retval)
if(NS_FAILED(rv) || !inst)
return NS_ERROR_XPC_CI_RETURNED_FAILURE;
JSObject* instJSObj;
nsCOMPtr<nsIXPConnectJSObjectHolder> holder;
rv = xpc->WrapNative(cx, obj, inst, *iid, getter_AddRefs(holder));
if(NS_FAILED(rv) || !holder || NS_FAILED(holder->GetJSObject(&instJSObj)))
rv = xpc->WrapNativeToJSVal(cx, obj, inst, nsnull, iid, PR_TRUE, vp, nsnull);
if(NS_FAILED(rv) || JSVAL_IS_PRIMITIVE(*vp))
return NS_ERROR_XPC_CANT_CREATE_WN;
*vp = OBJECT_TO_JSVAL(instJSObj);
ccxp->SetReturnValueWasSet(JS_TRUE);
return NS_OK;
}

View file

@ -52,6 +52,13 @@ https://bugzilla.mozilla.org/show_bug.cgi?id=500931
"this is wrapped correctly");
SimpleTest.finish();
}, 0)
var saw0 = false;
for (let i in $('ifr').contentDocument.getElementsByTagName('body')) {
if (i === "0")
saw0 = true;
}
ok(saw0, "properly enumerated the 0 value");
}
SimpleTest.waitForExplicitFinish();

View file

@ -697,10 +697,10 @@ XrayWrapper<Base, Policy>::enumerate(JSContext *cx, JSObject *wrapper, js::AutoI
// Go through the properties we got and enumerate all native ones.
for (size_t n = 0; n < wnProps.length(); ++n) {
jsid id = wnProps[n];
JSPropertyDescriptor dummy;
if (!ResolveNativeProperty(cx, wrapper, holder, id, false, &dummy))
JSBool hasProp;
if (!JS_HasPropertyById(cx, wrapper, id, &hasProp))
return false;
if (dummy.obj)
if (hasProp)
props.append(id);
}
return true;

View file

@ -1523,6 +1523,11 @@ public:
{
generate();
if (oom()) {
m_shouldFallBack = true;
return;
}
ExecutablePool *executablePool = allocator.poolForSize(size());
if (!executablePool) {
m_shouldFallBack = true;