Mirror of https://github.com/mozilla/gecko-dev.git
Bug 990568 - Remove unnecessary whitespace, r=jandem
Parent: 14117d9d80
Commit: a26f062142
@@ -55,7 +55,7 @@ __attribute__((noinline))
 void pre_run ( void ) { }
 /////////////////////////////////////////////////////////////////
-//// test1 (simple straight line code)
+//// test1 (simple straight line code)
 #if WTF_COMPILER_GCC
 void test1 ( void )
@@ -170,7 +170,7 @@ void test1 ( void )
 #endif /* WTF_COMPILER_GCC */
 /////////////////////////////////////////////////////////////////
-//// test2 (a simple counting-down loop)
+//// test2 (a simple counting-down loop)
 #if WTF_COMPILER_GCC
 void test2 ( void )
@@ -326,7 +326,7 @@ void test2 ( void )
 #endif /* WTF_COMPILER_GCC */
 /////////////////////////////////////////////////////////////////
-//// test3 (if-then-else)
+//// test3 (if-then-else)
 #if WTF_COMPILER_GCC
 void test3 ( void )
@@ -513,7 +513,7 @@ void test3 ( void )
 #endif /* WTF_COMPILER_GCC */
 /////////////////////////////////////////////////////////////////
-//// test4 (callable function)
+//// test4 (callable function)
 void test4 ( void )
 {
@@ -696,7 +696,7 @@ void test4 ( void )
 /////////////////////////////////////////////////////////////////
-//// test5 (call in, out, repatch)
+//// test5 (call in, out, repatch)
 // a function which we will call from the JIT generated code
 unsigned long cube ( unsigned long x ) { return x * x * x; }
@@ -909,7 +909,7 @@ void test5 ( void )
 JSC::RepatchBuffer repatchBuffer(&cb);
 repatchBuffer.relink( JSC::CodeLocationCall(cp),
 JSC::FunctionPtr::FunctionPtr( &square ));
 result = fn();
 printf("value computed is %lu (expected 81)\n", result);
 printf("\n\n");

@@ -428,7 +428,7 @@ void ARMAssembler::dataTransfer8(bool isLoad, RegisterID srcDst, RegisterID base
 mem_reg_off(isLoad, true, 8, true, srcDst, base, ARMRegisters::S0);
 else
 dtrb_ur(isLoad, srcDst, base, ARMRegisters::S0);
 }
 }
 }

@@ -25,7 +25,7 @@
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_ARMAssembler_h
@@ -221,7 +221,7 @@ namespace JSC {
 // This flag makes switches the instruction between {ld,st}r{,s}h and {ld,st}rsb
 HDT_UH = (1 << 5),
 // if this bit is on, we do a register offset, if it is off, we do an immediate offest.
-HDT_IMM = (1 << 22),
+HDT_IMM = (1 << 22),
 // Differentiates half word load/store between signed and unsigned (also enables signed byte loads.)
 HDT_S = (1 << 6),
 DT_LOAD = (1 << 20)
@@ -556,7 +556,7 @@ namespace JSC {
 ASSERT(size == 8 || size == 16 || size == 32);
 char const * mnemonic_act = (isLoad) ? ("ld") : ("st");
 char const * mnemonic_sign = (isSigned) ? ("s") : ("");
 char const * mnemonic_size = NULL;
 switch (size / 8) {
 case 1:
@@ -762,7 +762,7 @@ namespace JSC {
 {
 spew("%-15s %s, [%s, #-%u]",
 "ldrsh", nameGpReg(rd), nameGpReg(rb), offset);
-emitInst(static_cast<ARMWord>(cc) | LDRH | HDT_UH | HDT_S | DT_PRE, rd, rb, offset);
+emitInst(static_cast<ARMWord>(cc) | LDRH | HDT_UH | HDT_S | DT_PRE, rd, rb, offset);
 }
 void ldrsh_u(int rd, int rb, ARMWord offset, Condition cc = AL)
@@ -1296,7 +1296,7 @@ namespace JSC {
 if ((op2 & OP2_IMM) || (op2 & OP2_IMMh)) {
 // Immediate values.
 uint32_t imm = decOp2Imm(op2 & ~(OP2_IMM | OP2_IMMh));
 sprintf(out, "#0x%x @ (%d)", imm, static_cast<int32_t>(imm));
 } else {
@@ -1315,7 +1315,7 @@ namespace JSC {
 // Immediate-shifted register.
 // Example: "r0, ASR #31"
 uint32_t imm = (op2 >> 7) & 0x1f;
 // Deal with special encodings.
 if ((type == LSL) && (imm == 0)) {
 // "LSL #0" doesn't shift at all (and is the default).
@@ -1331,7 +1331,7 @@ namespace JSC {
 if (((type == LSR) || (type == ASR)) && (imm == 0)) {
 // Both LSR and ASR have a range of 1-32, with 32
-// encoded as 0.
+// encoded as 0.
 imm = 32;
 }
@@ -1450,11 +1450,11 @@ namespace JSC {
 VFP_DXFER = 0x0C400A00,
 VFP_DBL = (1<<8),
 /*integer conversions*/
 VFP_ICVT = 0x00B80040,
 VFP_FPCVT = 0x00B700C0,
 VFP_DTR = 0x01000000,
 VFP_MOV = 0x00000010,
@@ -1523,16 +1523,16 @@ namespace JSC {
 void fmem_imm_off(bool isLoad, bool isDouble, bool isUp, int dest, int rn, ARMWord offset, Condition cc = AL)
 {
 char const * ins = isLoad ? "vldr.f" : "vstr.f";
-spew("%s%d %s, [%s, #%s%u]",
+spew("%s%d %s, [%s, #%s%u]",
 ins, (isDouble ? 64 : 32), (isDouble ? nameFpRegD(dest) : nameFpRegS(dest)),
 nameGpReg(rn), (isUp ? "+" : "-"), offset);
 ASSERT(offset <= 0xff);
-emitVFPInst(static_cast<ARMWord>(cc) |
-VFP_EXT | VFP_DTR |
+emitVFPInst(static_cast<ARMWord>(cc) |
+VFP_EXT | VFP_DTR |
 (isDouble ? VFP_DBL : 0) |
-(isUp ? DT_UP : 0) |
+(isUp ? DT_UP : 0) |
 (isLoad ? DT_LOAD : 0), isDouble ? DD(dest) : SD(dest), RN(rn), offset);
 }
 // WARNING: even for an int -> float conversion, all registers used
@@ -1542,10 +1542,10 @@ namespace JSC {
 ASSERT(srcType != dstType);
 ASSERT(isFloatType(srcType) || isFloatType(dstType));
-spew("vcvt.%s.%-15s, %s,%s",
+spew("vcvt.%s.%-15s, %s,%s",
 nameType(dstType), nameType(srcType),
 nameTypedReg(dstType,dest), nameTypedReg(srcType,src));
 if (isFloatType(srcType) && isFloatType (dstType)) {
 // doing a float -> float conversion
 bool dblToFloat = srcType == FloatReg64;
@@ -1562,7 +1562,7 @@ namespace JSC {
 void vmov64 (bool fromFP, bool isDbl, int r1, int r2, int rFP, Condition cc = AL)
 {
 if (fromFP) {
-spew("%-15s %s, %s, %s", "vmov",
+spew("%-15s %s, %s, %s", "vmov",
 nameGpReg(r1), nameGpReg(r2), nameFpRegD(rFP));
 } else {
 spew("%-15s %s, %s, %s", "vmov",

@@ -24,8 +24,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_ARMv7Assembler_h
@@ -230,7 +230,7 @@ class ARMThumbImmediate {
 {
 m_value.asInt = 0;
 }
 ARMThumbImmediate(ThumbImmediateType type, ThumbImmediateValue value)
 : m_type(type)
 , m_value(value)
@@ -270,14 +270,14 @@ public:
 // zero. count(B) == 8, so the count of bits to be checked is 24 - count(Z).
 int32_t rightShiftAmount = 24 - leadingZeros;
 if (value == ((value >> rightShiftAmount) << rightShiftAmount)) {
-// Shift the value down to the low byte position. The assign to
+// Shift the value down to the low byte position. The assign to
 // shiftValue7 drops the implicit top bit.
 encoding.shiftValue7 = value >> rightShiftAmount;
 // The endoded shift amount is the magnitude of a right rotate.
 encoding.shiftAmount = 8 + leadingZeros;
 return ARMThumbImmediate(TypeEncoded, encoding);
 }
 PatternBytes bytes;
 bytes.asInt = value;
@@ -324,7 +324,7 @@ public:
 {
 return ARMThumbImmediate(TypeUInt16, value);
 }
 bool isValid()
 {
 return m_type != TypeInvalid;
@@ -379,16 +379,16 @@ public:
 m_u.type = (ARMShiftType)0;
 m_u.amount = 0;
 }
 ShiftTypeAndAmount(ARMShiftType type, unsigned amount)
 {
 m_u.type = type;
 m_u.amount = amount & 31;
 }
 unsigned lo4() { return m_u.lo4; }
 unsigned hi4() { return m_u.hi4; }
 private:
 union {
 struct {
@@ -404,14 +404,14 @@ private:
 /*
-Some features of the Thumb instruction set are deprecated in ARMv7. Deprecated features affecting
-instructions supported by ARMv7-M are as follows:
-• use of the PC as <Rd> or <Rm> in a 16-bit ADD (SP plus register) instruction
-• use of the SP as <Rm> in a 16-bit ADD (SP plus register) instruction
-• use of the SP as <Rm> in a 16-bit CMP (register) instruction
-• use of MOV (register) instructions in which <Rd> is the SP or PC and <Rm> is also the SP or PC.
-• use of <Rn> as the lowest-numbered register in the register list of a 16-bit STM instruction with base
-register writeback
+Some features of the Thumb instruction set are deprecated in ARMv7. Deprecated features affecting
+instructions supported by ARMv7-M are as follows:
+• use of the PC as <Rd> or <Rm> in a 16-bit ADD (SP plus register) instruction
+• use of the SP as <Rm> in a 16-bit ADD (SP plus register) instruction
+• use of the SP as <Rm> in a 16-bit CMP (register) instruction
+• use of MOV (register) instructions in which <Rd> is the SP or PC and <Rm> is also the SP or PC.
+• use of <Rn> as the lowest-numbered register in the register list of a 16-bit STM instruction with base
+register writeback
 */
 class ARMv7Assembler {
@@ -468,7 +468,7 @@ public:
 int m_offset;
 };
 class JmpDst {
 friend class ARMv7Assembler;
 friend class ARMInstructionFormatter;
@@ -871,7 +871,7 @@ public:
 m_formatter.twoWordOp16Op16(OP_B_T4a, OP_B_T4b);
 return JmpSrc(m_formatter.size());
 }
 // Only allowed in IT (if then) block if last instruction.
 JmpSrc blx(RegisterID rm)
 {
@@ -1006,19 +1006,19 @@ public:
 ASSERT(rn != ARMRegisters::pc);
 ASSERT(index || wback);
 ASSERT(!wback | (rt != rn));
 bool add = true;
 if (offset < 0) {
 add = false;
 offset = -offset;
 }
 ASSERT((offset & ~0xff) == 0);
 offset |= (wback << 8);
 offset |= (add << 9);
 offset |= (index << 10);
 offset |= (1 << 11);
 m_formatter.twoWordOp12Reg4Reg4Imm12(OP_LDR_imm_T4, rn, rt, offset);
 }
@@ -1064,19 +1064,19 @@ public:
 ASSERT(rn != ARMRegisters::pc);
 ASSERT(index || wback);
 ASSERT(!wback | (rt != rn));
 bool add = true;
 if (offset < 0) {
 add = false;
 offset = -offset;
 }
 ASSERT((offset & ~0xff) == 0);
 offset |= (wback << 8);
 offset |= (add << 9);
 offset |= (index << 10);
 offset |= (1 << 11);
 m_formatter.twoWordOp12Reg4Reg4Imm12(OP_LDRH_imm_T3, rn, rt, offset);
 }
@@ -1176,7 +1176,7 @@ public:
 ASSERT(imm.isValid());
 ASSERT(!imm.isEncodedImm());
 ASSERT(!BadReg(rd));
 m_formatter.twoWordOp5i6Imm4Reg4EncodedImm(OP_MOV_imm_T3, imm.m_value.imm4, rd, imm);
 }
@@ -1184,7 +1184,7 @@ public:
 {
 ASSERT(imm.isValid());
 ASSERT(!BadReg(rd));
 if ((rd < 8) && imm.isUInt8())
 m_formatter.oneWordOp5Reg3Imm8(OP_MOV_imm_T1, rd, imm.getUInt8());
 else if (imm.isEncodedImm())
@@ -1209,7 +1209,7 @@ public:
 {
 ASSERT(imm.isEncodedImm());
 ASSERT(!BadReg(rd));
 m_formatter.twoWordOp5i6Imm4Reg4EncodedImm(OP_MVN_imm, 0xf, rd, imm);
 }
@@ -1312,19 +1312,19 @@ public:
 ASSERT(rn != ARMRegisters::pc);
 ASSERT(index || wback);
 ASSERT(!wback | (rt != rn));
 bool add = true;
 if (offset < 0) {
 add = false;
 offset = -offset;
 }
 ASSERT((offset & ~0xff) == 0);
 offset |= (wback << 8);
 offset |= (add << 9);
 offset |= (index << 10);
 offset |= (1 << 11);
 m_formatter.twoWordOp12Reg4Reg4Imm12(OP_STR_imm_T4, rn, rt, offset);
 }
@@ -1516,7 +1516,7 @@ public:
 {
 return JmpDst(m_formatter.size());
 }
 JmpDst align(int alignment)
 {
 while (!m_formatter.isAligned(alignment))
@@ -1524,36 +1524,36 @@ public:
 return label();
 }
 static void* getRelocatedAddress(void* code, JmpSrc jump)
 {
 ASSERT(jump.m_offset != -1);
 return reinterpret_cast<void*>(reinterpret_cast<ptrdiff_t>(code) + jump.m_offset);
 }
 static void* getRelocatedAddress(void* code, JmpDst destination)
 {
 ASSERT(destination.m_offset != -1);
 return reinterpret_cast<void*>(reinterpret_cast<ptrdiff_t>(code) + destination.m_offset);
 }
 static int getDifferenceBetweenLabels(JmpDst src, JmpDst dst)
 {
 return dst.m_offset - src.m_offset;
 }
 static int getDifferenceBetweenLabels(JmpDst src, JmpSrc dst)
 {
 return dst.m_offset - src.m_offset;
 }
 static int getDifferenceBetweenLabels(JmpSrc src, JmpDst dst)
 {
 return dst.m_offset - src.m_offset;
 }
 // Assembler admin methods:
 size_t size() const
@@ -1601,7 +1601,7 @@ public:
 static void linkJump(void* code, JmpSrc from, void* to)
 {
 ASSERT(from.m_offset != -1);
 uint16_t* location = reinterpret_cast<uint16_t*>(reinterpret_cast<intptr_t>(code) + from.m_offset);
 linkJumpAbsolute(location, to);
 }
@@ -1636,7 +1636,7 @@ public:
 {
 return true;
 }
 static void relinkCall(void* from, void* to)
 {
 ASSERT(!(reinterpret_cast<intptr_t>(from) & 1));
@@ -1650,7 +1650,7 @@ public:
 static void repatchInt32(void* where, int32_t value)
 {
 ASSERT(!(reinterpret_cast<intptr_t>(where) & 1));
 setInt32(where, value);
 ExecutableAllocator::cacheFlush(reinterpret_cast<uint16_t*>(where) - 4, 4 * sizeof(uint16_t));
@@ -1659,7 +1659,7 @@ public:
 static void repatchPointer(void* where, void* value)
 {
 ASSERT(!(reinterpret_cast<intptr_t>(where) & 1));
 setPointer(where, value);
 ExecutableAllocator::cacheFlush(reinterpret_cast<uint16_t*>(where) - 4, 4 * sizeof(uint16_t));
@@ -1838,7 +1838,7 @@ private:
 {
 m_buffer.putShort(op | (rd << 8) | imm);
 }
 void oneWordOp5Imm5Reg3Reg3(OpcodeID op, uint8_t imm, RegisterID reg1, RegisterID reg2)
 {
 m_buffer.putShort(op | (imm << 6) | (reg1 << 3) | reg2);
@@ -1873,13 +1873,13 @@ private:
 m_buffer.putShort(op | reg);
 m_buffer.putShort(ff.m_u.value);
 }
 void twoWordOp16FourFours(OpcodeID1 op, FourFours ff)
 {
 m_buffer.putShort(op);
 m_buffer.putShort(ff.m_u.value);
 }
 void twoWordOp16Op16(OpcodeID1 op1, OpcodeID2 op2)
 {
 m_buffer.putShort(op1);

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_AbstractMacroAssembler_h
@@ -98,7 +98,7 @@ public:
 , offset(offset)
 {
 }
 RegisterID base;
 intptr_t offset;
 };
@@ -190,7 +190,7 @@ public:
 {
 }
 };
 // TrustedImm32:
 //
 // A 32bit immediate operand to an instruction - this is wrapped in a
@@ -289,7 +289,7 @@ public:
 : m_label(masm->m_assembler.label())
 {
 }
 bool isUsed() const { return m_label.isUsed(); }
 void used() { m_label.used(); }
 bool isSet() const { return m_label.isValid(); }
@@ -314,7 +314,7 @@ public:
 : m_label(masm->m_assembler.label())
 {
 }
 bool isSet() const { return m_label.isValid(); }
 private:
@@ -365,7 +365,7 @@ public:
 : m_flags(None)
 {
 }
 Call(JmpSrc jmp, Flags flags)
 : m_jmp(jmp)
 , m_flags(flags)
@@ -402,17 +402,17 @@ public:
 Jump()
 {
 }
-Jump(JmpSrc jmp)
+Jump(JmpSrc jmp)
 : m_jmp(jmp)
 {
 }
 void link(AbstractMacroAssembler<AssemblerType>* masm) const
 {
 masm->m_assembler.linkJump(m_jmp, masm->m_assembler.label());
 }
 void linkTo(Label label, AbstractMacroAssembler<AssemblerType>* masm) const
 {
 masm->m_assembler.linkJump(m_jmp, label.m_label);
@@ -455,7 +455,7 @@ public:
 m_jumps[i].link(masm);
 m_jumps.clear();
 }
 void linkTo(Label label, AbstractMacroAssembler<AssemblerType>* masm)
 {
 size_t size = m_jumps.length();
@@ -463,12 +463,12 @@ public:
 m_jumps[i].linkTo(label, masm);
 m_jumps.clear();
 }
 void append(Jump jump)
 {
 m_jumps.append(jump);
 }
 void append(const JumpList& other)
 {
 m_jumps.append(other.m_jumps.begin(), other.m_jumps.length());
@@ -483,7 +483,7 @@ public:
 {
 return !m_jumps.length();
 }
 const JumpVector& jumps() const { return m_jumps; }
 private:
@@ -528,7 +528,7 @@ public:
 {
 return DataLabel32(this);
 }
 Label align()
 {
 m_assembler.align(16);

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_AssemblerBuffer_h

@@ -25,7 +25,7 @@
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_AssemblerBufferWithConstantPool_h
@@ -248,7 +248,7 @@ public:
 } dpun;
 dpun.d = constant;
 m_pool[m_numConsts] = dpun.s.lo;
 m_pool[m_numConsts+1] = dpun.s.hi;
 m_mask[m_numConsts] = static_cast<char>(isReusable ? ReusableConst : UniqueConst);

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_CodeLocation_h

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_LinkBuffer_h
@@ -91,7 +91,7 @@ public:
 ASSERT(call.isFlagSet(Call::Linkable));
 MacroAssembler::linkCall(code(), call, function);
 }
 void link(Jump jump, CodeLocationLabel label)
 {
 MacroAssembler::linkJump(code(), jump, label);
@@ -180,7 +180,7 @@ public:
 }
 protected:
-// Keep this private! - the underlying code should only be obtained externally via
+// Keep this private! - the underlying code should only be obtained externally via
 // finalizeCode() or finalizeCodeAddendum().
 void* code()
 {

@@ -317,7 +317,7 @@ public:
 void mul(RegisterID rd, RegisterID rs, RegisterID rt)
 {
-#if WTF_MIPS_ISA_AT_LEAST(32)
+#if WTF_MIPS_ISA_AT_LEAST(32)
 emitInst(0x70000002 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
 | (rt << OP_SH_RT));
 #else

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssembler_h
@@ -84,7 +84,7 @@ public:
 {
 addPtr(Imm32(sizeof(void*)), stackPointerRegister);
 }
 void peek(RegisterID dest, int index = 0)
 {
 loadPtr(Address(stackPointerRegister, (index * sizeof(void*))), dest);
@@ -131,7 +131,7 @@ public:
 {
 branch16(cond, left, right).linkTo(target, this);
 }
 void branchTestPtr(Condition cond, RegisterID reg, Label target)
 {
 branchTestPtr(cond, reg).linkTo(target, this);
@@ -216,12 +216,12 @@ public:
 {
 sub32(src, dest);
 }
 void subPtr(Imm32 imm, RegisterID dest)
 {
 sub32(imm, dest);
 }
 void subPtr(ImmPtr imm, RegisterID dest)
 {
 sub32(Imm32(imm), dest);

@@ -25,7 +25,7 @@
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+*
 * ***** END LICENSE BLOCK ***** */
 #include "assembler/wtf/Platform.h"

@@ -26,7 +26,7 @@
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssemblerARM_h
@@ -190,16 +190,16 @@ public:
 {
 m_assembler.movs_r(dest, m_assembler.asr(dest, imm.m_value & 0x1f));
 }
 void urshift32(RegisterID shift_amount, RegisterID dest)
 {
 ARMWord w = ARMAssembler::getOp2(0x1f);
 ASSERT(w != ARMAssembler::INVALID_IMM);
 m_assembler.and_r(ARMRegisters::S0, shift_amount, w);
 m_assembler.movs_r(dest, m_assembler.lsr_r(dest, ARMRegisters::S0));
 }
 void urshift32(Imm32 imm, RegisterID dest)
 {
 m_assembler.movs_r(dest, m_assembler.lsr(dest, imm.m_value & 0x1f));
@@ -258,7 +258,7 @@ public:
 void load8SignExtend(ImplicitAddress address, RegisterID dest)
 {
 m_assembler.dataTransferN(true, true, 8, dest, address.base, address.offset);
-}
+}
 void load8ZeroExtend(ImplicitAddress address, RegisterID dest)
 {
@@ -287,11 +287,11 @@ public:
 {
 load16(address, dest);
 }
 void load16SignExtend(ImplicitAddress address, RegisterID dest)
 {
 m_assembler.dataTransferN(true, true, 16, dest, address.base, address.offset);
-}
+}
 void load16ZeroExtend(ImplicitAddress address, RegisterID dest)
 {
@@ -360,7 +360,7 @@ public:
 m_assembler.add_r(ARMRegisters::S1, address.base, m_assembler.lsl(address.index, address.scale));
 load16(Address(ARMRegisters::S1, address.offset), dest);
 }
 void load16(ImplicitAddress address, RegisterID dest)
 {
 if (address.offset >= 0)
@@ -1179,7 +1179,7 @@ public:
 m_assembler.vcvt(m_assembler.FloatReg32, m_assembler.FloatReg64, dest_s, dest);
 return label;
 }
 void storeDouble(FPRegisterID src, ImplicitAddress address)
 {
 // Store a double at base+offset.

@@ -24,8 +24,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssemblerARMv7_h
@@ -62,14 +62,14 @@ class MacroAssemblerARMv7 : public AbstractMacroAssembler<ARMv7Assembler> {
 Scale scale;
 };
 } u;
 explicit ArmAddress(RegisterID base, int32_t offset = 0)
 : type(HasOffset)
 , base(base)
 {
 u.offset = offset;
 }
 explicit ArmAddress(RegisterID base, RegisterID index, Scale scale = TimesOne)
 : type(HasIndex)
 , base(base)
@@ -78,7 +78,7 @@ class MacroAssemblerARMv7 : public AbstractMacroAssembler<ARMv7Assembler> {
 u.scale = scale;
 }
 };
 public:
 static const Scale ScalePtr = TimesFour;
@@ -264,17 +264,17 @@ public:
 {
 m_assembler.asr(dest, dest, imm.m_value & 0x1f);
 }
 void urshift32(RegisterID shift_amount, RegisterID dest)
 {
 // Clamp the shift to the range 0..31
 ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(0x1f);
 ASSERT(armImm.isValid());
 m_assembler.ARM_and(dataTempRegister, shift_amount, armImm);
 m_assembler.lsr(dest, dest, dataTempRegister);
 }
 void urshift32(Imm32 imm, RegisterID dest)
 {
 m_assembler.lsr(dest, dest, imm.m_value & 0x1f);
@@ -351,7 +351,6 @@ public:
 m_assembler.eor(dest, dest, dataTempRegister);
 }
 }
 // Memory access operations:
 //
@@ -463,7 +462,7 @@ public:
 {
 m_assembler.ldrh(dest, makeBaseIndexBase(address), address.index, address.scale);
 }
 void load16(ImplicitAddress address, RegisterID dest)
 {
 m_assembler.ldrh(dest, address.base, address.offset);
@@ -537,7 +536,7 @@ public:
 base = addressTempRegister;
 offset = 0;
 }
 m_assembler.vldr(dest, base, offset);
 }
@@ -552,7 +551,7 @@ public:
 base = addressTempRegister;
 offset = 0;
 }
 m_assembler.vstr(src, base, offset);
 }
@@ -638,7 +637,7 @@ public:
 // operations add and remove a single register sized unit of data
 // to or from the stack. Peek and poke operations read or write
 // values on the stack, without moving the current stack position.
 void pop(RegisterID dest)
 {
 // store postindexed with writeback
@@ -933,7 +932,7 @@ public:
 // * jz operations branch if the result is zero.
 // * jo operations branch if the (signed) arithmetic
 // operation caused an overflow to occur.
 Jump branchAdd32(Condition cond, RegisterID src, RegisterID dest)
 {
 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
@@ -990,7 +989,7 @@ public:
 }
 return Jump(makeBranch(cond));
 }
 // Miscellaneous operations:

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssemblerCodeRef_h

@@ -187,12 +187,12 @@ namespace JSC {
 // The last 5 bit of imm.m_value will be used anyway.
 m_assembler.sra_imm(dest, imm.m_value, dest);
 }
 void urshift32(RegisterID shift_amount, RegisterID dest)
 {
 m_assembler.srl_r(dest, shift_amount, dest);
 }
 void urshift32(Imm32 imm, RegisterID dest)
 {
 // No need to check if imm.m_value.
@@ -326,7 +326,7 @@ namespace JSC {
 add32(Imm32(address.offset), SparcRegisters::g2);
 m_assembler.lduh_r(address.base, SparcRegisters::g2, dest);
 }
 void load16(ImplicitAddress address, RegisterID dest)
 {
 if (m_assembler.isimm13(address.offset))

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssemblerX86_h
@@ -77,17 +77,17 @@ public:
 {
 m_assembler.addl_im(imm.m_value, address.m_ptr);
 }
 void addWithCarry32(Imm32 imm, AbsoluteAddress address)
 {
 m_assembler.adcl_im(imm.m_value, address.m_ptr);
 }
 void and32(Imm32 imm, AbsoluteAddress address)
 {
 m_assembler.andl_im(imm.m_value, address.m_ptr);
 }
 void or32(TrustedImm32 imm, AbsoluteAddress address)
 {
 m_assembler.orl_im(imm.m_value, address.m_ptr);

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssemblerX86Common_h
@@ -127,7 +127,7 @@ public:
 {
 m_assembler.addl_ir(imm.m_value, dest);
 }
 void add32(Address src, RegisterID dest)
 {
 m_assembler.addl_mr(src.offset, src.base, dest);
@@ -137,7 +137,7 @@ public:
 {
 m_assembler.addl_rm(src, dest.offset, dest.base);
 }
 void and32(RegisterID src, RegisterID dest)
 {
 m_assembler.andl_rr(src, dest);
@@ -167,7 +167,7 @@ public:
 {
 m_assembler.shll_i8r(imm.m_value, dest);
 }
 void lshift32(RegisterID shift_amount, RegisterID dest)
 {
 // On x86 we can only shift by ecx; if asked to shift by another register we'll
@@ -184,12 +184,12 @@ public:
 // E.g. transform "shll %eax, %ebx" -> "xchgl %eax, %ecx; shll %ecx, %ebx; xchgl %eax, %ecx"
 else
 m_assembler.shll_CLr(dest);
 swap(shift_amount, X86Registers::ecx);
 } else
 m_assembler.shll_CLr(dest);
 }
 void mul32(RegisterID src, RegisterID dest)
 {
 m_assembler.imull_rr(src, dest);
@@ -199,7 +199,7 @@ public:
 {
 m_assembler.imull_mr(src.offset, src.base, dest);
 }
 void mul32(Imm32 imm, RegisterID src, RegisterID dest)
 {
 m_assembler.imull_i32r(src, imm.m_value, dest);
@@ -230,7 +230,7 @@ public:
 {
 m_assembler.notl_m(srcDest.offset, srcDest.base);
 }
 void or32(RegisterID src, RegisterID dest)
 {
 m_assembler.orl_rr(src, dest);
@@ -272,7 +272,7 @@ public:
 // E.g. transform "shll %eax, %ebx" -> "xchgl %eax, %ecx; shll %ecx, %ebx; xchgl %eax, %ecx"
 else
 m_assembler.sarl_CLr(dest);
 swap(shift_amount, X86Registers::ecx);
 } else
 m_assembler.sarl_CLr(dest);
@@ -282,14 +282,14 @@ public:
 {
 m_assembler.sarl_i8r(imm.m_value, dest);
 }
 void urshift32(RegisterID shift_amount, RegisterID dest)
 {
 // On x86 we can only shift by ecx; if asked to shift by another register we'll
 // need rejig the shift amount into ecx first, and restore the registers afterwards.
 if (shift_amount != X86Registers::ecx) {
 swap(shift_amount, X86Registers::ecx);
 // E.g. transform "shrl %eax, %eax" -> "xchgl %eax, %ecx; shrl %ecx, %ecx; xchgl %eax, %ecx"
 if (dest == shift_amount)
 m_assembler.shrl_CLr(X86Registers::ecx);
@@ -299,12 +299,12 @@ public:
 // E.g. transform "shrl %eax, %ebx" -> "xchgl %eax, %ecx; shrl %ecx, %ebx; xchgl %eax, %ecx"
 else
 m_assembler.shrl_CLr(dest);
 swap(shift_amount, X86Registers::ecx);
 } else
 m_assembler.shrl_CLr(dest);
 }
 void urshift32(Imm32 imm, RegisterID dest)
 {
 m_assembler.shrl_i8r(imm.m_value, dest);
@@ -314,12 +314,12 @@ public:
 {
 m_assembler.subl_rr(src, dest);
 }
 void sub32(TrustedImm32 imm, RegisterID dest)
 {
 m_assembler.subl_ir(imm.m_value, dest);
 }
 void sub32(TrustedImm32 imm, Address address)
 {
 m_assembler.subl_im(imm.m_value, address.offset, address.base);
@@ -360,7 +360,7 @@ public:
 {
 m_assembler.xorl_mr(src.offset, src.base, dest);
 }
 void sqrtDouble(FPRegisterID src, FPRegisterID dst)
 {
 m_assembler.sqrtsd_rr(src, dst);
@@ -423,7 +423,7 @@ public:
 {
 m_assembler.movzbl_mr(address.offset, address.base, address.index, address.scale, dest);
 }
 void load8ZeroExtend(Address address, RegisterID dest)
 {
 m_assembler.movzbl_mr(address.offset, address.base, dest);
@@ -433,7 +433,7 @@ public:
 {
 m_assembler.movsbl_mr(address.offset, address.base, address.index, address.scale, dest);
 }
 void load8SignExtend(Address address, RegisterID dest)
 {
 m_assembler.movsbl_mr(address.offset, address.base, dest);
@@ -443,7 +443,7 @@ public:
 {
 m_assembler.movswl_mr(address.offset, address.base, address.index, address.scale, dest);
 }
 void load16SignExtend(Address address, RegisterID dest)
 {
 m_assembler.movswl_mr(address.offset, address.base, dest);
@@ -453,7 +453,7 @@ public:
 {
 m_assembler.movzwl_mr(address.offset, address.base, address.index, address.scale, dest);
 }
 void load16(Address address, RegisterID dest)
 {
 m_assembler.movzwl_mr(address.offset, address.base, dest);
@@ -723,7 +723,7 @@ public:
 void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID fpTemp)
 {
 ASSERT(isSSE2Present());
-ASSERT(src != fpTemp);
+ASSERT(src != fpTemp);
 m_assembler.cvttsd2si_rr(src, dest);
 // If the result is zero, it might have been -0.0, and the double comparison won't catch this!
@@ -750,7 +750,7 @@ public:
 // operations add and remove a single register sized unit of data
 // to or from the stack. Peek and poke operations read or write
 // values on the stack, without moving the current stack position.
 void pop(RegisterID dest)
 {
 m_assembler.pop_r(dest);
@@ -888,7 +888,7 @@ public:
 m_assembler.cmpl_ir(right.m_value, left);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 // Branch based on a 32-bit comparison, forcing the size of the
 // immediate operand to 32 bits in the native code stream to ensure that
 // the length of code emitted by this instruction is consistent.
@@ -919,7 +919,7 @@ public:
 m_assembler.cmpl_mr(right.offset, right.base, left);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branch32(Condition cond, Address left, RegisterID right)
 {
 m_assembler.cmpl_rm(right, left.offset, left.base);
@@ -996,7 +996,7 @@ public:
 m_assembler.testl_i32m(mask.m_value, address.offset, address.base, address.index, address.scale);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branchTest8(Condition cond, Address address, Imm32 mask = Imm32(-1))
 {
 ASSERT((cond == Zero) || (cond == NonZero));
@@ -1006,7 +1006,7 @@ public:
 m_assembler.testb_im(mask.m_value, address.offset, address.base);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branchTest8(Condition cond, BaseIndex address, Imm32 mask = Imm32(-1))
 {
 ASSERT((cond == Zero) || (cond == NonZero));
@@ -1047,7 +1047,7 @@ public:
 // * jz operations branch if the result is zero.
 // * jo operations branch if the (signed) arithmetic
 // operation caused an overflow to occur.
 Jump branchAdd32(Condition cond, RegisterID src, RegisterID dest)
 {
 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
@@ -1061,7 +1061,7 @@ public:
 add32(imm, dest);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branchAdd32(Condition cond, Imm32 src, Address dest)
 {
 ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
@@ -1096,21 +1096,21 @@ public:
 mul32(src, dest);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branchMul32(Condition cond, Imm32 imm, RegisterID src, RegisterID dest)
 {
 ASSERT(cond == Overflow);
 mul32(imm, src, dest);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branchSub32(Condition cond, RegisterID src, RegisterID dest)
 {
 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
 sub32(src, dest);
 return Jump(m_assembler.jCC(x86Condition(cond)));
 }
 Jump branchSub32(Condition cond, Imm32 imm, RegisterID dest)
 {
 ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
@@ -1453,7 +1453,6 @@ private:
 return s_sseCheckState >= HasSSE2;
 }
 #endif // PLATFORM(MAC)
 #elif !defined(NDEBUG) // CPU(X86)

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_MacroAssemblerX86_64_h
@@ -67,13 +67,13 @@ public:
 move(ImmPtr(address.m_ptr), scratchRegister);
 add32(imm, Address(scratchRegister));
 }
 void and32(Imm32 imm, AbsoluteAddress address)
 {
 move(ImmPtr(address.m_ptr), scratchRegister);
 and32(imm, Address(scratchRegister));
 }
 void or32(TrustedImm32 imm, AbsoluteAddress address)
 {
 move(ImmPtr(address.m_ptr), scratchRegister);
@@ -191,7 +191,7 @@ public:
 move(ImmPtr(address.m_ptr), scratchRegister);
 addPtr(imm, Address(scratchRegister));
 }
 void andPtr(RegisterID src, RegisterID dest)
 {
 m_assembler.andq_rr(src, dest);
@@ -255,12 +255,12 @@ public:
 {
 m_assembler.subq_rr(src, dest);
 }
 void subPtr(Imm32 imm, RegisterID dest)
 {
 m_assembler.subq_ir(imm.m_value, dest);
 }
 void subPtr(ImmPtr imm, RegisterID dest)
 {
 move(imm, scratchRegister);
@@ -337,7 +337,7 @@ public:
 {
 m_assembler.movq_rm(src, address.offset, address.base, address.index, address.scale);
 }
 void storePtr(RegisterID src, void* address)
 {
 if (src == X86Registers::eax)

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_RepatchBuffer_h
@@ -129,7 +129,7 @@ public:
 {
 relink(CodeLocationCall(CodePtr(returnAddress)), label);
 }
 void relinkCallerToTrampoline(ReturnAddressPtr returnAddress, CodePtr newCalleeFunction)
 {
 relinkCallerToTrampoline(returnAddress, CodeLocationLabel(newCalleeFunction));
@@ -139,12 +139,12 @@ public:
 {
 relink(CodeLocationCall(CodePtr(returnAddress)), function);
 }
 void relinkNearCallerToTrampoline(ReturnAddressPtr returnAddress, CodeLocationLabel label)
 {
 relink(CodeLocationNearCall(CodePtr(returnAddress)), label);
 }
 void relinkNearCallerToTrampoline(ReturnAddressPtr returnAddress, CodePtr newCalleeFunction)
 {
 relinkNearCallerToTrampoline(returnAddress, CodeLocationLabel(newCalleeFunction));

@@ -963,12 +963,12 @@ namespace JSC {
 {
 return dst.m_offset - src.m_offset;
 }
 static int getDifferenceBetweenLabels(JmpDst src, JmpSrc dst)
 {
 return dst.m_offset - src.m_offset;
 }
 static int getDifferenceBetweenLabels(JmpSrc src, JmpDst dst)
 {
 return dst.m_offset - src.m_offset;
@@ -985,7 +985,7 @@ namespace JSC {
 return reinterpret_cast<void*>(reinterpret_cast<ptrdiff_t>(code) + jump.m_offset);
 }
 static void* getRelocatedAddress(void* code, JmpDst destination)
 {
 ASSERT(destination.m_offset != -1);
@@ -1035,7 +1035,7 @@ namespace JSC {
 from, to);
 int value = ((int)to - (int)from) / 4;
-if (isimm22(value))
+if (isimm22(value))
 patchbranch(from, value);
 else {
 patchbranch(from, 4);
@@ -1104,7 +1104,7 @@ namespace JSC {
 }
 static void repatchPointer(void* where, void* value)
-{
+{
 js::JaegerSpew(js::JSpew_Insns,
 ISPFX "##repatchPointer ((where = %p)) points to ((%p))\n",
 where, value);

@@ -23,8 +23,8 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-*
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
 * ***** END LICENSE BLOCK ***** */
 #ifndef assembler_assembler_X86Assembler_h

@@ -22,7 +22,7 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 #include "assembler/jit/ExecutableAllocator.h"

@@ -20,7 +20,7 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 #ifndef assembler_jit_ExecutableAllocator_h
@@ -298,7 +298,7 @@ private:
 if ((std::numeric_limits<size_t>::max() - granularity) <= request)
 return OVERSIZE_ALLOCATION;
 // Round up to next page boundary
 size_t size = request + (granularity - 1);
 size = size & ~(granularity - 1);

@@ -20,7 +20,7 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 #include "assembler/jit/ExecutableAllocator.h"

@@ -20,7 +20,7 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 #ifndef assembler_wtf_Assertions_h

@@ -42,7 +42,7 @@
 #define CPU(WTF_FEATURE) (defined WTF_CPU_##WTF_FEATURE && WTF_CPU_##WTF_FEATURE)
 /* HAVE() - specific system features (headers, functions or similar) that are present or not */
 #define HAVE(WTF_FEATURE) (defined HAVE_##WTF_FEATURE && HAVE_##WTF_FEATURE)
-/* OS() - underlying operating system; only to be used for mandated low-level services like
+/* OS() - underlying operating system; only to be used for mandated low-level services like
 virtual memory, not to choose a GUI toolkit */
 #define OS(WTF_FEATURE) (defined WTF_OS_##WTF_FEATURE && WTF_OS_##WTF_FEATURE)
@@ -341,7 +341,7 @@
 /* WTF_CPU_ARMV5_OR_LOWER - ARM instruction set v5 or earlier */
-/* On ARMv5 and below the natural alignment is required.
+/* On ARMv5 and below the natural alignment is required.
 And there are some other differences for v5 or earlier. */
 #if !defined(ARMV5_OR_LOWER) && WTF_CPU_ARM && !(WTF_ARM_ARCH_VERSION >= 6)
 #define WTF_CPU_ARMV5_OR_LOWER 1
@@ -383,7 +383,7 @@
 #define WTF_CPU_NEEDS_ALIGNED_ACCESS 1
 #endif
-/* ==== OS() - underlying operating system; only to be used for mandated low-level services like
+/* ==== OS() - underlying operating system; only to be used for mandated low-level services like
 virtual memory, not to choose a GUI toolkit ==== */
 /* WTF_OS_ANDROID - Android */
@@ -1243,7 +1243,7 @@
 //#include "GTypedefs.h"
 #endif
-/* FIXME: This define won't be needed once #27551 is fully landed. However,
+/* FIXME: This define won't be needed once #27551 is fully landed. However,
 since most ports try to support sub-project independence, adding new headers
 to WTF causes many ports to break, and so this way we can address the build
 breakages one port at a time. */

@@ -20,14 +20,14 @@
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
 #ifndef assembler_wtf_VMTags_h
 #define assembler_wtf_VMTags_h
 // On Mac OS X, the VM subsystem allows tagging memory requested from mmap and vm_map
-// in order to aid tools that inspect system memory use.
+// in order to aid tools that inspect system memory use.
 #if WTF_PLATFORM_DARWIN
 #include <mach/vm_statistics.h>

@@ -1208,7 +1208,7 @@ InitTypeClasses(JSContext* cx, HandleObject parent)
 // Attach the prototypes just created to each of ctypes.CType.prototype,
 // and the special type constructors, so we can access them when constructing
-// instances of those types.
+// instances of those types.
 AttachProtos(CTypeProto, protos);
 AttachProtos(protos[SLOT_POINTERPROTO], protos);
 AttachProtos(protos[SLOT_ARRAYPROTO], protos);
@@ -2719,7 +2719,7 @@ BuildTypeName(JSContext* cx, JSObject* typeObj_)
 // Outer type is pointer, inner type is array. Grouping is required.
 PrependString(result, "(");
 AppendString(result, ")");
 }
 }
 // Array types go on the right.
 AppendString(result, "[");
@@ -2805,8 +2805,8 @@ BuildTypeName(JSContext* cx, JSObject* typeObj_)
 // equality is determined by strict JSObject pointer equality.)
 static void
 BuildTypeSource(JSContext* cx,
-JSObject* typeObj_,
-bool makeShort,
+JSObject* typeObj_,
+bool makeShort,
 AutoString& result)
 {
 RootedObject typeObj(cx, typeObj_);
@@ -2952,9 +2952,9 @@ BuildTypeSource(JSContext* cx,
 // structs and arrays are converted with ImplicitConvert.)
 static bool
 BuildDataSource(JSContext* cx,
-HandleObject typeObj,
-void* data,
-bool isImplicit,
+HandleObject typeObj,
+void* data,
+bool isImplicit,
 AutoString& result)
 {
 TypeCode type = CType::GetTypeCode(typeObj);

@@ -119,7 +119,7 @@ Library::Create(JSContext* cx, jsval path_, JSCTypesCallbacks* callbacks)
 // provided.
 char* pathBytes;
 if (callbacks && callbacks->unicodeToNative) {
-pathBytes =
+pathBytes =
 callbacks->unicodeToNative(cx, pathStr->chars(), pathStr->length());
 if (!pathBytes)
 return nullptr;
@@ -273,7 +273,7 @@ Library::Declare(JSContext* cx, unsigned argc, jsval* vp)
 // back will be of type 'type', and will point into the symbol data.
 // This data will be both readable and writable via the usual CData
 // accessors. If 'type' is a PointerType to a FunctionType, the result will
-// be a function pointer, as with 1).
+// be a function pointer, as with 1).
 if (args.length() < 2) {
 JS_ReportError(cx, "declare requires at least two arguments");
 return false;

@@ -157,7 +157,7 @@ class IonBuilder : public MIRGenerator
 // MIR instruction
 MTableSwitch *ins;
-// The number of current successor that get mapped into a block.
+// The number of current successor that get mapped into a block.
 uint32_t currentBlock;
 } tableswitch;

@@ -647,7 +647,7 @@ HandleException(ResumeFromException *rfe)
 // to be. Unset the flag here so that if we call DebugEpilogue below,
 // it doesn't try to pop the SPS frame again.
 iter.baselineFrame()->unsetPushedSPSFrame();
 if (cx->compartment()->debugMode() && !calledDebugEpilogue) {
 // If DebugEpilogue returns |true|, we have to perform a forced
 // return, e.g. return frame->returnValue() to the caller.

@@ -208,7 +208,7 @@ class FrameSizeClass
 explicit FrameSizeClass(uint32_t class_) : class_(class_)
 { }
 public:
 FrameSizeClass()
 { }

@@ -1380,7 +1380,7 @@ LinearScanAllocator::setIntervalRequirement(LiveInterval *interval)
 void
 LinearScanAllocator::UnhandledQueue::enqueueBackward(LiveInterval *interval)
 {
-IntervalReverseIterator i(rbegin());
+IntervalReverseIterator i(rbegin());
 for (; i != rend(); i++) {
 if (i->start() > interval->start())

@@ -922,7 +922,7 @@ MTypeBarrier::printOpcode(FILE *fp) const
 fprintf(fp, " ");
 getOperand(0)->printName(fp);
 }
 void
 MPhi::removeOperand(size_t index)
 {
@@ -2484,7 +2484,7 @@ MCompare::evaluateConstantOperands(bool *result)
 int32_t comp = 0; // Default to equal.
 if (left != right)
 comp = CompareAtoms(&lhs.toString()->asAtom(), &rhs.toString()->asAtom());
 switch (jsop_) {
 case JSOP_LT:
 *result = (comp < 0);

@@ -727,7 +727,6 @@ class O2RegImmShift : public Op2Reg
 datastore::Reg r(*this);
 datastore::RIS ris(r);
 return ris.ShiftAmount;
 }
 };

@@ -55,7 +55,7 @@ CodeGeneratorARM::generatePrologue()
 bool
 CodeGeneratorARM::generateEpilogue()
 {
-masm.bind(&returnLabel_);
+masm.bind(&returnLabel_);
 #if JS_TRACE_LOGGING
 masm.tracelogStop();
 #endif

@@ -891,7 +891,7 @@ MacroAssemblerARM::ma_check_mul(Register src1, Register src2, Register dest, Con
 as_smull(ScratchRegister, dest, src1, src2, SetCond);
 return cond;
 }
 if (cond == Overflow) {
 as_smull(ScratchRegister, dest, src1, src2);
 as_cmp(ScratchRegister, asr(dest, 31));
@@ -909,7 +909,7 @@ MacroAssemblerARM::ma_check_mul(Register src1, Imm32 imm, Register dest, Conditi
 as_smull(ScratchRegister, dest, ScratchRegister, src1, SetCond);
 return cond;
 }
 if (cond == Overflow) {
 as_smull(ScratchRegister, dest, ScratchRegister, src1);
 as_cmp(ScratchRegister, asr(dest, 31));
@@ -1186,7 +1186,6 @@ MacroAssemblerARM::ma_dataTransferN(LoadStore ls, int size, bool IsSigned,
 //
 // Note a neg_bottom of 0x1000 can not be encoded as an immediate negative offset in the
 // instruction and this occurs when bottom is zero, so this case is guarded against below.
 //
 if (off < 0) {
 Operand2 sub_off = Imm8(-(off-bottom)); // sub_off = bottom - off
 if (!sub_off.invalid) {
@@ -1227,7 +1226,6 @@ MacroAssemblerARM::ma_dataTransferN(LoadStore ls, int size, bool IsSigned,
 //
 // Note a neg_bottom of 0x100 can not be encoded as an immediate negative offset in the
 // instruction and this occurs when bottom is zero, so this case is guarded against below.
 //
 if (off < 0) {
 Operand2 sub_off = Imm8(-(off-bottom)); // sub_off = bottom - off
 if (!sub_off.invalid) {
@@ -1644,7 +1642,6 @@ MacroAssemblerARM::ma_vdtr(LoadStore ls, const Operand &addr, VFPRegister rt, Co
 //
 // Note a neg_bottom of 0x400 can not be encoded as an immediate negative offset in the
 // instruction and this occurs when bottom is zero, so this case is guarded against below.
 //
 if (off < 0) {
 Operand2 sub_off = Imm8(-(off-bottom)); // sub_off = bottom - off
 if (!sub_off.invalid) {

@ -861,7 +861,7 @@ class Assembler
|
|||
BufferOffset as_mfc1(Register rt, FloatRegister fs);
|
||||
|
||||
protected:
|
||||
// These instructions should only be used to access the odd part of
|
||||
// These instructions should only be used to access the odd part of
|
||||
// 64-bit register pair. Do not use odd registers as 32-bit registers.
|
||||
// :TODO: Bug 972836, Remove _Odd functions once we can use odd regs.
|
||||
BufferOffset as_ls_Odd(FloatRegister fd, Register base, int32_t off);
|
||||
|
|
|
@ -1344,7 +1344,7 @@ class AssemblerX86Shared
|
|||
void pcmpeqw(const FloatRegister &lhs, const FloatRegister &rhs) {
|
||||
JS_ASSERT(HasSSE2());
|
||||
masm.pcmpeqw_rr(rhs.code(), lhs.code());
|
||||
}
|
||||
}
|
||||
void movd(const Register &src, const FloatRegister &dest) {
|
||||
JS_ASSERT(HasSSE2());
|
||||
masm.movd_rr(src.code(), dest.code());
|
||||
|
|
|
@ -187,7 +187,7 @@ struct Pool
|
|||
int getAlignment() {
|
||||
return alignment;
|
||||
}
|
||||
|
||||
|
||||
uint32_t addPoolSize(uint32_t start) {
|
||||
start = align(start);
|
||||
start += immSize * numEntries;
|
||||
|
|
|
@ -139,7 +139,7 @@ class FloatRegisters {
|
|||
|
||||
static const uint32_t AllMask = (1 << Total) - 1;
|
||||
|
||||
static const uint32_t VolatileMask =
|
||||
static const uint32_t VolatileMask =
|
||||
#if defined(_WIN64)
|
||||
(1 << JSC::X86Registers::xmm0) |
|
||||
(1 << JSC::X86Registers::xmm1) |
|
||||
|
|
|
@ -110,7 +110,7 @@ MacroAssemblerX64::setupABICall(uint32_t args)
|
|||
inCall_ = true;
|
||||
|
||||
args_ = args;
|
||||
passedIntArgs_ = 0;
|
||||
passedIntArgs_ = 0;
|
||||
passedFloatArgs_ = 0;
|
||||
stackForCall_ = ShadowStackSpace;
|
||||
}
|
||||
|
|
|
@ -110,5 +110,5 @@ probes::StopExecution(JSScript *script)
|
|||
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
|
||||
#endif /* vm_Probes_inl_h */
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
#include "jscntxt.h"
|
||||
|
||||
#ifdef INCLUDE_MOZILLA_DTRACE
|
||||
#include "jsscriptinlines.h"
|
||||
#include "jsscriptinlines.h"
|
||||
#endif
|
||||
|
||||
#define TYPEOF(cx,v) (JSVAL_IS_NULL(v) ? JSTYPE_NULL : JS_TypeOfValue(cx,v))
|
||||
|
|
|
@ -1050,7 +1050,7 @@ FrameIter::isConstructing() const
|
|||
return data_.ionFrames_.isConstructing();
|
||||
#else
|
||||
break;
|
||||
#endif
|
||||
#endif
|
||||
case INTERP:
|
||||
return interpFrame()->isConstructing();
|
||||
}
|
||||
|
|
|
@ -584,7 +584,7 @@ iJIT_IsProfilingActiveFlags JITAPI iJIT_IsProfilingActive(void);
|
|||
*
|
||||
* @param[in] event_type - type of the data sent to the agent
|
||||
* @param[in] EventSpecificData - pointer to event-specific data
|
||||
*
|
||||
*
|
||||
* @returns 1 on success, otherwise 0.
|
||||
*/
|
||||
int JITAPI iJIT_NotifyEvent(iJIT_JVM_EVENT event_type, void *EventSpecificData);
|
||||
|
|
|
@ -9,13 +9,13 @@
|
|||
* are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
|
||||
* its contributors may be used to endorse or promote products derived
|
||||
* from this software without specific prior written permission.
|
||||
* from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
|
||||
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||||
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
|
@ -180,7 +180,6 @@ private:
|
|||
return pool;
|
||||
}
|
||||
|
||||
//
|
||||
void* current = pool->m_current;
|
||||
void* allocationEnd = static_cast<char*>(current) + size;
|
||||
ASSERT(allocationEnd > current); // check for overflow
|
||||
|
|
|
@ -22,7 +22,7 @@
|
|||
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||||
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
|
||||
#ifndef yarr_CheckedArithmetic_h
|
||||
|
@ -141,7 +141,7 @@ template <typename Target, typename Source> struct BoundsChecker<Target, Source,
|
|||
return false;
|
||||
// If our (unsigned) Target is the same or greater width we can
|
||||
// convert value to type Target without losing precision
|
||||
if (sizeof(Target) >= sizeof(Source))
|
||||
if (sizeof(Target) >= sizeof(Source))
|
||||
return static_cast<Target>(value) <= ::std::numeric_limits<Target>::max();
|
||||
// The signed Source type has greater precision than the target so
|
||||
// max(Target) -> Source will widen.
|
||||
|
@ -153,7 +153,7 @@ template <typename Target, typename Source> struct BoundsChecker<Target, Source,
|
|||
static bool inBounds(Source value)
|
||||
{
|
||||
// Signed target with an unsigned source
|
||||
if (sizeof(Target) <= sizeof(Source))
|
||||
if (sizeof(Target) <= sizeof(Source))
|
||||
return value <= static_cast<Source>(::std::numeric_limits<Target>::max());
|
||||
// Target is Wider than Source so we're guaranteed to fit any value in
|
||||
// unsigned Source
|
||||
|
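The two BoundsChecker hunks above hinge on comparing the widths of Source and Target to decide how a range check can be performed without losing precision. A self-contained sketch of the signed-source, unsigned-target case follows; it is a simplification (a free function with a hypothetical name rather than the BoundsChecker template itself):

    #include <cstdint>
    #include <limits>

    // Returns true if the signed 'value' is representable in the unsigned Target.
    template <typename Target, typename Source>
    bool fitsInUnsignedTarget(Source value)
    {
        if (value < 0)
            return false; // negative values never fit in an unsigned type
        if (sizeof(Target) >= sizeof(Source))
            return true;  // Target is at least as wide, so any non-negative value fits
        // Source is wider: compare against Target's maximum widened into Source.
        return value <= static_cast<Source>(std::numeric_limits<Target>::max());
    }

    // Example: fitsInUnsignedTarget<uint8_t>(int32_t(300)) is false,
    //          fitsInUnsignedTarget<uint8_t>(int32_t(200)) is true.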
@@ -175,7 +175,7 @@ template <typename Target, typename Source> static inline bool isInBounds(Source

template <typename T> struct RemoveChecked {
typedef T CleanType;
static const CleanType DefaultValue = 0;
static const CleanType DefaultValue = 0;
};

template <typename T> struct RemoveChecked<Checked<T, CrashOnOverflow> > {

@@ -227,7 +227,7 @@ template <typename U, typename V> struct ResultBase<U, V, false, true> {
template <typename U, typename V> struct Result : ResultBase<typename RemoveChecked<U>::CleanType, typename RemoveChecked<V>::CleanType> {
};

template <typename LHS, typename RHS, typename ResultType = typename Result<LHS, RHS>::ResultType,
template <typename LHS, typename RHS, typename ResultType = typename Result<LHS, RHS>::ResultType,
bool lhsSigned = ::std::numeric_limits<LHS>::is_signed, bool rhsSigned = ::std::numeric_limits<RHS>::is_signed> struct ArithmeticOperations;

template <typename LHS, typename RHS, typename ResultType> struct ArithmeticOperations<LHS, RHS, ResultType, true, true> {

@@ -343,7 +343,7 @@ template <typename ResultType> struct ArithmeticOperations<int, unsigned, Result
result = static_cast<ResultType>(temp);
return true;
}

static inline bool sub(int64_t lhs, int64_t rhs, ResultType& result)
{
int64_t temp = lhs - rhs;

@@ -377,7 +377,7 @@ template <typename ResultType> struct ArithmeticOperations<unsigned, int, Result
{
return ArithmeticOperations<int, unsigned, ResultType>::add(rhs, lhs, result);
}

static inline bool sub(int64_t lhs, int64_t rhs, ResultType& result)
{
return ArithmeticOperations<int, unsigned, ResultType>::sub(lhs, rhs, result);

@@ -415,7 +415,7 @@ template <typename U, typename V> static inline bool safeEquals(U lhs, V rhs)
}

enum ResultOverflowedTag { ResultOverflowed };

// FIXME: Needed to workaround http://llvm.org/bugs/show_bug.cgi?id=10801
static inline bool workAroundClangBug() { return true; }

@@ -441,14 +441,14 @@ public:
this->overflowed();
m_value = static_cast<T>(value);
}

template <typename V> Checked(const Checked<T, V>& rhs)
: m_value(rhs.m_value)
{
if (rhs.hasOverflowed())
this->overflowed();
}

template <typename U> Checked(const Checked<U, OverflowHandler>& rhs)
: OverflowHandler(rhs)
{

@@ -456,7 +456,7 @@ public:
this->overflowed();
m_value = static_cast<T>(rhs.m_value);
}

template <typename U, typename V> Checked(const Checked<U, V>& rhs)
{
if (rhs.hasOverflowed())

@@ -465,7 +465,7 @@ public:
this->overflowed();
m_value = static_cast<T>(rhs.m_value);
}

const Checked& operator=(Checked rhs)
{
this->clearOverflow();

@@ -474,17 +474,17 @@ public:
m_value = static_cast<T>(rhs.m_value);
return *this;
}

template <typename U> const Checked& operator=(U value)
{
return *this = Checked(value);
}

template <typename U, typename V> const Checked& operator=(const Checked<U, V>& rhs)
{
return *this = Checked(rhs);
}

// prefix
const Checked& operator++()
{

@@ -493,7 +493,7 @@ public:
m_value++;
return *this;
}

const Checked& operator--()
{
if (m_value == ::std::numeric_limits<T>::min())

@@ -501,7 +501,7 @@ public:
m_value--;
return *this;
}

// postfix operators
const Checked operator++(int)
{

@@ -509,14 +509,14 @@ public:
this->overflowed();
return Checked(m_value++);
}

const Checked operator--(int)
{
if (m_value == ::std::numeric_limits<T>::min())
this->overflowed();
return Checked(m_value--);
}

// Boolean operators
bool operator!() const
{
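The operator++(int) and operator--(int) hunks above show the Checked<T> idiom: notice that the stored value already sits at the numeric limit before the wrap would happen, and record the overflow instead of letting it pass silently. A stripped-down sketch of the same idea follows; MiniChecked is a hypothetical stand-in, not the real Checked class:

    #include <cstdint>
    #include <limits>
    #include <cassert>

    template <typename T>
    struct MiniChecked {
        T value = 0;
        bool overflowed = false;

        // Postfix increment: flag overflow instead of silently wrapping around.
        MiniChecked operator++(int)
        {
            MiniChecked old = *this;
            if (value == std::numeric_limits<T>::max())
                overflowed = true;
            else
                ++value;
            return old;
        }
    };

    int main()
    {
        MiniChecked<uint8_t> c;
        c.value = 255;
        c++;
        assert(c.overflowed); // incrementing past 255 is reported, not wrapped
        return 0;
    }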
@@ -540,7 +540,7 @@ public:
CRASH();
return m_value;
}

bool safeGet(T& value) const WARN_UNUSED_RETURN
{
value = m_value;

@@ -583,7 +583,7 @@ public:
{
return *this *= (double)rhs;
}

template <typename U, typename V> const Checked operator+=(Checked<U, V> rhs)
{
if (rhs.hasOverflowed())

@@ -617,7 +617,7 @@ public:
this->overflowed();
return safeEquals(m_value, rhs);
}

template <typename U, typename V> const Checked operator==(Checked<U, V> rhs)
{
return unsafeGet() == Checked(rhs.unsafeGet());

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_MatchResult_h

@@ -23,7 +23,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */

@@ -45,7 +45,7 @@ public:
PageBlock();
PageBlock(const PageBlock&);
PageBlock(void*, size_t);

void* base() const { return m_base; }
size_t size() const { return m_size; }

@@ -22,14 +22,14 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_VMTags_h
#define yarr_VMTags_h

// On Mac OS X, the VM subsystem allows tagging memory requested from mmap and vm_map
// in order to aid tools that inspect system memory use.
// in order to aid tools that inspect system memory use.
#if WTF_OS_DARWIN

#include <mach/vm_statistics.h>

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

// DO NOT EDIT! - this file autogenerated by YarrCanonicalizeUCS2.js

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_YarrCanonicalizeUCS2_h

@@ -1270,7 +1270,7 @@ public:
case ByteTerm::TypeUncheckInput:
input.uncheckInput(currentTerm().checkInputCount);
MATCH_NEXT();

case ByteTerm::TypeDotStarEnclosure:
if (matchDotStarEnclosure(currentTerm(), context))
return JSRegExpMatch;

@@ -1517,7 +1517,7 @@ public:
{
m_bodyDisjunction->terms.append(ByteTerm::UncheckInput(count));
}

void assertionBOL(unsigned inputPosition)
{
m_bodyDisjunction->terms.append(ByteTerm::BOL(inputPosition));

@@ -176,7 +176,7 @@ struct ByteTerm {
atom.quantityCount = 1;
inputPosition = inputPos;
}

ByteTerm(Type type, bool invert = false)
: type(type)
, m_capture(false)

@@ -222,7 +222,7 @@ struct ByteTerm {
term.checkInputCount = count.unsafeGet();
return term;
}

static ByteTerm EOL(int inputPos)
{
ByteTerm term(TypeAssertionEOL);

@@ -236,7 +236,7 @@ struct ByteTerm {
term.inputPosition = inputPos;
return term;
}

static ByteTerm BackReference(unsigned subpatternId, int inputPos)
{
return ByteTerm(TypeBackReference, subpatternId, false, false, inputPos);

@@ -305,7 +305,7 @@ struct ByteTerm {
{
return ByteTerm(TypeSubpatternEnd);
}

static ByteTerm DotStarEnclosure(bool bolAnchor, bool eolAnchor)
{
ByteTerm term(TypeDotStarEnclosure);

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "yarr/YarrJIT.h"

@@ -751,7 +751,7 @@ class YarrGenerator : private MacroAssembler {

if (op.m_isDeadCode)
return true;

// m_ops always ends with a OpBodyAlternativeEnd or OpMatchFailed
// node, so there must always be at least one more node.
ASSERT(opIndex + 1 < m_ops.size());

@@ -790,7 +790,7 @@ class YarrGenerator : private MacroAssembler {

for (numberCharacters = 1; numberCharacters < maxCharactersAtOnce && nextOp->m_op == OpTerm; ++numberCharacters, nextOp = &m_ops[opIndex + numberCharacters]) {
PatternTerm* nextTerm = nextOp->m_term;

if (nextTerm->type != PatternTerm::TypePatternCharacter
|| nextTerm->quantityType != QuantifierFixedCount
|| nextTerm->quantityCount != 1

@@ -820,7 +820,7 @@ class YarrGenerator : private MacroAssembler {
allCharacters |= (currentCharacter << shiftAmount);

if ((m_pattern.m_ignoreCase) && (isASCIIAlpha(currentCharacter)))
ignoreCaseMask |= 32 << shiftAmount;
ignoreCaseMask |= 32 << shiftAmount;
}

if (m_charSize == Char8) {
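The ignoreCaseMask hunk above relies on an ASCII property: 'A'..'Z' and 'a'..'z' differ only in bit 0x20, so OR-ing 32 into both the expected and the actual character folds case for letters, which is why the mask bit is only set when isASCIIAlpha holds. A small sketch of the trick in isolation (the helper name is an assumption for illustration, not YarrJIT code):

    #include <cstdio>

    // Case-insensitive comparison of two ASCII letters via the 0x20 bit.
    static bool equalIgnoringAsciiCase(char expected, char actual)
    {
        return (expected | 0x20) == (actual | 0x20);
    }

    int main()
    {
        std::printf("%d\n", equalIgnoringAsciiCase('a', 'A')); // prints 1
        std::printf("%d\n", equalIgnoringAsciiCase('a', 'b')); // prints 0
        return 0;
    }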
@@ -1203,7 +1203,7 @@ class YarrGenerator : private MacroAssembler {

move(index, matchPos);

Label findEOLLoop(this);
Label findEOLLoop(this);
foundEndingNewLine.append(branch32(Equal, matchPos, length));
if (m_charSize == Char8)
load8(BaseIndex(input, matchPos, TimesOne, 0), character);

@@ -1226,7 +1226,7 @@ class YarrGenerator : private MacroAssembler {
{
backtrackTermDefault(opIndex);
}

// Code generation/backtracking for simple terms
// (pattern characters, character classes, and assertions).
// These methods farm out work to the set of functions above.

@@ -1417,7 +1417,7 @@ class YarrGenerator : private MacroAssembler {
PatternAlternative* alternative = op.m_alternative;

// If we get here, the prior alternative matched - return success.

// Adjust the stack pointer to remove the pattern's frame.
#if !WTF_CPU_SPARC
removeCallFrame();

@@ -1425,7 +1425,7 @@ class YarrGenerator : private MacroAssembler {

// Load appropriate values into the return register and the first output
// slot, and return. In the case of pattern with a fixed size, we will
// not have yet set the value in the first
// not have yet set the value in the first
ASSERT(index != returnRegister);
if (m_pattern.m_body->m_hasFixedSize) {
move(index, returnRegister);

@@ -1592,7 +1592,7 @@ class YarrGenerator : private MacroAssembler {
// OpParenthesesSubpatternOnceBegin/End
//
// These nodes support (optionally) capturing subpatterns, that have a
// quantity count of 1 (this covers fixed once, and ?/?? quantifiers).
// quantity count of 1 (this covers fixed once, and ?/?? quantifiers).
case OpParenthesesSubpatternOnceBegin: {
PatternTerm* term = op.m_term;
unsigned parenthesesFrameLocation = term->frameLocation;

@@ -1732,7 +1732,7 @@ class YarrGenerator : private MacroAssembler {
unsigned parenthesesFrameLocation = term->frameLocation;
storeToFrame(index, parenthesesFrameLocation);

// Check
// Check
op.m_checkAdjust = m_checked - term->inputPosition;
if (op.m_checkAdjust)
sub32(Imm32(op.m_checkAdjust), index);

@@ -1911,7 +1911,7 @@ class YarrGenerator : private MacroAssembler {
// Generate code to handle input check failures from alternatives except the last.
// prevOp is the alternative we're handling a bail out from (initially Begin), and
// nextOp is the alternative we will be attempting to reenter into.
//
//
// We will link input check failures from the forwards matching path back to the code
// that can handle them.
YarrOp* prevOp = beginOp;

@@ -2530,7 +2530,7 @@ class YarrGenerator : private MacroAssembler {
lastOp.m_alternative = alternative;
lastOp.m_nextOp = thisOpIndex;
thisOp.m_previousOp = lastOpIndex;

++currentAlternativeIndex;
} while (currentAlternativeIndex < alternatives.size() && alternatives[currentAlternativeIndex]->onceThrough());

@@ -2566,7 +2566,7 @@ class YarrGenerator : private MacroAssembler {
lastOp.m_alternative = alternative;
lastOp.m_nextOp = thisOpIndex;
thisOp.m_previousOp = lastOpIndex;

++currentAlternativeIndex;
} while (currentAlternativeIndex < alternatives.size());
YarrOp& lastOp = m_ops.last();

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_YarrJIT_h

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_YarrParser_h
@@ -303,7 +303,7 @@ private:

unsigned backReference;
if (!consumeNumber(backReference))
break;
break;
if (backReference <= m_backReferenceLimit) {
delegate.atomBackReference(backReference);
break;

@@ -311,7 +311,7 @@ private:

restoreState(state);
}

// Not a backreference, and not octal.
if (peek() >= '8') {
delegate.atomPatternCharacter('\\');

@@ -392,7 +392,7 @@ private:
default:
delegate.atomPatternCharacter(consume());
}

return true;
}

@@ -470,7 +470,7 @@ private:
case ':':
m_delegate.atomParenthesesSubpatternBegin(false);
break;

case '=':
m_delegate.atomParentheticalAssertionBegin();
break;

@@ -478,7 +478,7 @@ private:
case '!':
m_delegate.atomParentheticalAssertionBegin(true);
break;

default:
m_err = ParenthesesTypeInvalid;
}

@@ -669,7 +669,7 @@ private:
// Misc helper functions:

typedef unsigned ParseState;

ParseState saveState()
{
return m_index;

@@ -23,7 +23,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "yarr/YarrPattern.h"

@@ -54,7 +54,7 @@ public:
: m_isCaseInsensitive(isCaseInsensitive)
{
}

void reset()
{
m_matches.clear();

@@ -122,7 +122,7 @@ public:
char asciiLo = lo;
char asciiHi = std::min(hi, (UChar)0x7f);
addSortedRange(m_ranges, lo, asciiHi);

if (m_isCaseInsensitive) {
if ((asciiLo <= 'Z') && (asciiHi >= 'A'))
addSortedRange(m_ranges, std::max(asciiLo, 'A')+('a'-'A'), std::min(asciiHi, 'Z')+('a'-'A'));

@@ -135,7 +135,7 @@ public:

lo = std::max(lo, (UChar)0x80);
addSortedRange(m_rangesUnicode, lo, hi);

if (!m_isCaseInsensitive)
return;

@@ -217,7 +217,7 @@ private:
range -= (index+1);
}
}

if (pos == matches.size())
matches.append(ch);
else

@@ -227,7 +227,7 @@ private:
void addSortedRange(Vector<CharacterRange>& ranges, UChar lo, UChar hi)
{
unsigned end = ranges.size();

// Simple linear scan - I doubt there are that many ranges anyway...
// feel free to fix this with something faster (eg binary chop).
for (unsigned i = 0; i < end; ++i) {

@@ -260,7 +260,7 @@ private:
} else
break;
}

return;
}
}

@@ -303,7 +303,7 @@ public:
m_alternative = m_pattern.m_body->addNewAlternative();
m_pattern.m_disjunctions.append(m_pattern.m_body);
}

void assertionBOL()
{
if (!m_alternative->m_terms.size() & !m_invertParentheticalAssertion) {
@@ -384,15 +384,15 @@ public:
case DigitClassID:
m_characterClassConstructor.append(invert ? m_pattern.nondigitsCharacterClass() : m_pattern.digitsCharacterClass());
break;

case SpaceClassID:
m_characterClassConstructor.append(invert ? m_pattern.nonspacesCharacterClass() : m_pattern.spacesCharacterClass());
break;

case WordClassID:
m_characterClassConstructor.append(invert ? m_pattern.nonwordcharCharacterClass() : m_pattern.wordcharCharacterClass());
break;

default:
ASSERT_NOT_REACHED();
}

@@ -470,7 +470,7 @@ public:
PatternAlternative* currentAlternative = m_alternative;
ASSERT(currentAlternative);

// Note to self: if we waited until the AST was baked, we could also remove forwards refs
// Note to self: if we waited until the AST was baked, we could also remove forwards refs
while ((currentAlternative = currentAlternative->m_parent->m_parent)) {
PatternTerm& term = currentAlternative->lastTerm();
ASSERT((term.type == PatternTerm::TypeParenthesesSubpattern) || (term.type == PatternTerm::TypeParentheticalAssertion));

@@ -484,7 +484,7 @@ public:
m_alternative->m_terms.append(PatternTerm(subpatternId));
}

// deep copy the argument disjunction. If filterStartsWithBOL is true,
// deep copy the argument disjunction. If filterStartsWithBOL is true,
// skip alternatives with m_startsWithBOL set true.
PatternDisjunction* copyDisjunction(PatternDisjunction* disjunction, bool filterStartsWithBOL = false)
{

@@ -507,17 +507,17 @@ public:
m_pattern.m_disjunctions.append(newDisjunction);
return newDisjunction;
}

PatternTerm copyTerm(PatternTerm& term, bool filterStartsWithBOL = false)
{
if ((term.type != PatternTerm::TypeParenthesesSubpattern) && (term.type != PatternTerm::TypeParentheticalAssertion))
return PatternTerm(term);

PatternTerm termCopy = term;
termCopy.parentheses.disjunction = copyDisjunction(termCopy.parentheses.disjunction, filterStartsWithBOL);
return termCopy;
}

void quantifyAtom(unsigned min, unsigned max, bool greedy)
{
ASSERT(min <= max);

@@ -694,7 +694,7 @@ public:
ASSERT(minimumInputSize != UINT_MAX);
if (minimumInputSize == UINT_MAX)
return PatternTooLarge;

ASSERT(minimumInputSize != UINT_MAX);
ASSERT(maximumCallFrameSize >= initialCallFrameSize);

@@ -747,10 +747,10 @@ public:
// m_startsWithBOL and rolling those up to containing alternatives.
// At this point, this is only valid for non-multiline expressions.
PatternDisjunction* disjunction = m_pattern.m_body;

if (!m_pattern.m_containsBOL || m_pattern.m_multiline)
return;

PatternDisjunction* loopDisjunction = copyDisjunction(disjunction, true);

// Set alternatives in disjunction to "onceThrough"

@@ -761,7 +761,7 @@ public:
// Move alternatives from loopDisjunction to disjunction
for (unsigned alt = 0; alt < loopDisjunction->m_alternatives.size(); ++alt)
disjunction->m_alternatives.append(loopDisjunction->m_alternatives[alt]);

loopDisjunction->m_alternatives.clear();
}
}

@@ -791,10 +791,10 @@ public:
return false;
}

// This optimization identifies alternatives in the form of
// [^].*[?]<expression>.*[$] for expressions that don't have any
// capturing terms. The alternative is changed to <expression>
// followed by processing of the dot stars to find and adjust the
// This optimization identifies alternatives in the form of
// [^].*[?]<expression>.*[$] for expressions that don't have any
// capturing terms. The alternative is changed to <expression>
// followed by processing of the dot stars to find and adjust the
// beginning and the end of the match.
void optimizeDotStarWrappedExpressions()
{

@@ -814,23 +814,23 @@ public:
startsWithBOL = true;
++termIndex;
}

PatternTerm& firstNonAnchorTerm = terms[termIndex];
if ((firstNonAnchorTerm.type != PatternTerm::TypeCharacterClass) || (firstNonAnchorTerm.characterClass != m_pattern.newlineCharacterClass()) || !((firstNonAnchorTerm.quantityType == QuantifierGreedy) || (firstNonAnchorTerm.quantityType == QuantifierNonGreedy)))
return;

firstExpressionTerm = termIndex + 1;

termIndex = terms.size() - 1;
if (terms[termIndex].type == PatternTerm::TypeAssertionEOL) {
endsWithEOL = true;
--termIndex;
}

PatternTerm& lastNonAnchorTerm = terms[termIndex];
if ((lastNonAnchorTerm.type != PatternTerm::TypeCharacterClass) || (lastNonAnchorTerm.characterClass != m_pattern.newlineCharacterClass()) || (lastNonAnchorTerm.quantityType != QuantifierGreedy))
return;

lastExpressionTerm = termIndex - 1;

if (firstExpressionTerm > lastExpressionTerm)

@@ -844,7 +844,7 @@ public:
terms.remove(termIndex - 1);

terms.append(PatternTerm(startsWithBOL, endsWithEOL));

m_pattern.m_containsBOL = false;
}
}

@@ -869,7 +869,7 @@ ErrorCode YarrPattern::compile(const String& patternString)

if (ErrorCode error = parse(constructor, patternString))
return error;

// If the pattern contains illegal backreferences reset & reparse.
// Quoting Netscape's "What's new in JavaScript 1.2",
// "Note: if the number of left parentheses is less than the number specified

@@ -893,7 +893,7 @@ ErrorCode YarrPattern::compile(const String& patternString)
constructor.checkForTerminalParentheses();
constructor.optimizeDotStarWrappedExpressions();
constructor.optimizeBOL();

if (ErrorCode error = constructor.setupOffsets())
return error;

@@ -23,7 +23,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_YarrPattern_h
@@ -186,7 +186,7 @@ struct PatternTerm {
quantityType = QuantifierFixedCount;
quantityCount = 1;
}

PatternTerm(Type type, bool invert = false)
: type(type)
, m_capture(false)

@@ -227,7 +227,7 @@ struct PatternTerm {
quantityType = QuantifierFixedCount;
quantityCount = 1;
}

static PatternTerm ForwardReference()
{
return PatternTerm(TypeForwardReference);

@@ -247,7 +247,7 @@ struct PatternTerm {
{
return PatternTerm(TypeAssertionWordBoundary, invert);
}

bool invert()
{
return m_invert;

@@ -257,7 +257,7 @@ struct PatternTerm {
{
return m_capture;
}

void quantify(unsigned count, QuantifierType type)
{
quantityCount = count;

@@ -282,18 +282,18 @@ public:
ASSERT(m_terms.size());
return m_terms[m_terms.size() - 1];
}

void removeLastTerm()
{
ASSERT(m_terms.size());
m_terms.shrink(m_terms.size() - 1);
}

void setOnceThrough()
{
m_onceThrough = true;
}

bool onceThrough()
{
return m_onceThrough;

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "yarr/YarrSyntaxChecker.h"

@@ -22,7 +22,7 @@
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef yarr_YarrSyntaxChecker_h

@@ -277,7 +277,7 @@ class JSGlobalData {
} /* namespace Yarr */

/*
* Replacements for std:: functions used in Yarr. We put them in
* Replacements for std:: functions used in Yarr. We put them in
* namespace JSC::std so that they can still be called as std::X
* in Yarr.
*/