diff --git a/js/src/jit/BaselineCacheIRCompiler.cpp b/js/src/jit/BaselineCacheIRCompiler.cpp
index 85c5394538b3..023480926ed2 100644
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -492,18 +492,9 @@ bool BaselineCacheIRCompiler::emitCallScriptedGetterResultShared(
   AutoScratchRegister callee(allocator, masm);
   AutoScratchRegister scratch(allocator, masm);
 
-  // First, ensure our getter is non-lazy.
-  {
-    FailurePath* failure;
-    if (!addFailurePath(&failure)) {
-      return false;
-    }
-
-    masm.loadPtr(getterAddr, callee);
-    masm.branchIfFunctionHasNoJitEntry(callee, /* constructing */ false,
-                                       failure->label());
-    masm.loadJitCodeRaw(callee, code);
-  }
+  // First, retrieve jitCodeRaw for getter.
+  masm.loadPtr(getterAddr, callee);
+  masm.loadJitCodeRaw(callee, code);
 
   allocator.discardStack(masm);
 
@@ -1547,18 +1538,8 @@ bool BaselineCacheIRCompiler::emitCallScriptedSetter() {
   ValueOperand val = allocator.useValueRegister(masm, reader.valOperandId());
   bool isSameRealm = reader.readBool();
 
-  // First, ensure our setter is non-lazy. This also loads the callee in
-  // scratch1.
-  {
-    FailurePath* failure;
-    if (!addFailurePath(&failure)) {
-      return false;
-    }
-
-    masm.loadPtr(setterAddr, scratch1);
-    masm.branchIfFunctionHasNoJitEntry(scratch1, /* constructing */ false,
-                                       failure->label());
-  }
+  // First, load the callee in scratch1.
+  masm.loadPtr(setterAddr, scratch1);
 
   allocator.discardStack(masm);
 
diff --git a/js/src/jit/CacheIR.cpp b/js/src/jit/CacheIR.cpp
index 3f2ea93295bc..6cafce78481c 100644
--- a/js/src/jit/CacheIR.cpp
+++ b/js/src/jit/CacheIR.cpp
@@ -5091,8 +5091,6 @@ AttachDecision CallIRGenerator::tryAttachCallScripted(
   if (isSpecialized) {
     // Ensure callee matches this stub's callee
     calleeOffset = writer.guardSpecificFunction(calleeObjId, calleeFunc);
-    // Guard against relazification
-    writer.guardFunctionHasJitEntry(calleeObjId, isConstructing);
   } else {
     // Guard that object is a scripted function
     writer.guardClass(calleeObjId, GuardClassKind::JSFunction);
diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index b5ef8c5a6f15..d151a02b6168 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -4971,8 +4971,13 @@ void CodeGenerator::visitCallGeneric(LCallGeneric* call) {
   if (call->mir()->isConstructing()) {
    masm.branchIfNotInterpretedConstructor(calleereg, nargsreg, &invoke);
   } else {
-    masm.branchIfFunctionHasNoJitEntry(calleereg, /* isConstructing */ false,
-                                       &invoke);
+    // See visitCallKnown.
+    if (call->mir()->needsArgCheck()) {
+      masm.branchIfFunctionHasNoJitEntry(calleereg, /* isConstructing */ false,
+                                         &invoke);
+    } else {
+      masm.branchIfFunctionHasNoScript(calleereg, &invoke);
+    }
     masm.branchFunctionKind(Assembler::Equal, JSFunction::ClassConstructor,
                             calleereg, objreg, &invoke);
   }
@@ -5098,14 +5103,6 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
 
   MOZ_ASSERT_IF(target->isClassConstructor(), call->isConstructing());
 
-  Label uncompiled;
-  if (!target->isNativeWithJitEntry()) {
-    // The calleereg is known to be a non-native function, but might point
-    // to a LazyScript instead of a JSScript.
-    masm.branchIfFunctionHasNoJitEntry(calleereg, call->isConstructing(),
-                                       &uncompiled);
-  }
-
   if (call->mir()->maybeCrossRealm()) {
     masm.switchToObjectRealm(calleereg, objreg);
   }
@@ -5113,7 +5110,22 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
   if (call->mir()->needsArgCheck()) {
     masm.loadJitCodeRaw(calleereg, objreg);
   } else {
+    // In order to use the jitCodeNoArgCheck entry point, we must ensure the
+    // JSFunction is pointing to the canonical JSScript. Due to lambda cloning,
+    // we may still be referencing the original LazyScript.
+    //
+    // NOTE: We checked that canonical function script had a valid JitScript.
+    // This will not be tossed without all Ion code being tossed first.
+
+    Label uncompiled, end;
+    masm.branchIfFunctionHasNoScript(calleereg, &uncompiled);
     masm.loadJitCodeNoArgCheck(calleereg, objreg);
+    masm.jump(&end);
+
+    // jitCodeRaw is still valid even if uncompiled.
+    masm.bind(&uncompiled);
+    masm.loadJitCodeRaw(calleereg, objreg);
+    masm.bind(&end);
   }
 
   // Nestle the StackPointer up to the argument vector.
@@ -5141,24 +5153,6 @@ void CodeGenerator::visitCallKnown(LCallKnown* call) {
   int prefixGarbage = sizeof(JitFrameLayout) - sizeof(void*);
   masm.adjustStack(prefixGarbage - unusedStack);
 
-  if (uncompiled.used()) {
-    Label end;
-    masm.jump(&end);
-
-    // Handle uncompiled functions.
-    masm.bind(&uncompiled);
-    if (call->isConstructing() && target->nargs() > call->numActualArgs()) {
-      emitCallInvokeFunctionShuffleNewTarget(call, calleereg, target->nargs(),
-                                             unusedStack);
-    } else {
-      emitCallInvokeFunction(call, calleereg, call->isConstructing(),
-                             call->ignoresReturnValue(), call->numActualArgs(),
-                             unusedStack);
-    }
-
-    masm.bind(&end);
-  }
-
   // If the return value of the constructing function is Primitive,
   // replace the return value with the Object from CreateThis.
   if (call->mir()->isConstructing()) {
diff --git a/js/src/jit/MacroAssembler-inl.h b/js/src/jit/MacroAssembler-inl.h
index 5330c11df786..df93a8780aa3 100644
--- a/js/src/jit/MacroAssembler-inl.h
+++ b/js/src/jit/MacroAssembler-inl.h
@@ -369,13 +369,18 @@ void MacroAssembler::branchTestFunctionFlags(Register fun, uint32_t flags,
 void MacroAssembler::branchIfFunctionHasNoJitEntry(Register fun,
                                                    bool isConstructing,
                                                    Label* label) {
-  int32_t flags = JSFunction::INTERPRETED;
+  int32_t flags = JSFunction::INTERPRETED | JSFunction::INTERPRETED_LAZY;
   if (!isConstructing) {
     flags |= JSFunction::WASM_JIT_ENTRY;
   }
   branchTestFunctionFlags(fun, flags, Assembler::Zero, label);
 }
 
+void MacroAssembler::branchIfFunctionHasNoScript(Register fun, Label* label) {
+  int32_t flags = JSFunction::INTERPRETED;
+  branchTestFunctionFlags(fun, flags, Assembler::Zero, label);
+}
+
 void MacroAssembler::branchIfInterpreted(Register fun, bool isConstructing,
                                          Label* label) {
   int32_t flags = JSFunction::INTERPRETED | JSFunction::INTERPRETED_LAZY;
diff --git a/js/src/jit/MacroAssembler.cpp b/js/src/jit/MacroAssembler.cpp
index 94c4ed3245f5..5c7432133181 100644
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -2927,8 +2927,10 @@ void MacroAssembler::moveRegPair(Register src0, Register src1, Register dst0,
 void MacroAssembler::branchIfNotInterpretedConstructor(Register fun,
                                                        Register scratch,
                                                        Label* label) {
-  // First, ensure it's a scripted function.
-  branchTestFunctionFlags(fun, JSFunction::INTERPRETED, Assembler::Zero, label);
+  // First, ensure it's a scripted function. It is fine if it is still lazy.
+  branchTestFunctionFlags(
+      fun, JSFunction::INTERPRETED | JSFunction::INTERPRETED_LAZY,
+      Assembler::Zero, label);
 
   // Check if the CONSTRUCTOR bit is set.
   branchTestFunctionFlags(fun, JSFunction::CONSTRUCTOR, Assembler::Zero, label);
diff --git a/js/src/jit/MacroAssembler.h b/js/src/jit/MacroAssembler.h
index bb8735536367..47eab65499ba 100644
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -1303,6 +1303,7 @@ class MacroAssembler : public MacroAssemblerSpecific {
 
   inline void branchIfFunctionHasNoJitEntry(Register fun, bool isConstructing,
                                             Label* label);
+  inline void branchIfFunctionHasNoScript(Register fun, Label* label);
   inline void branchIfInterpreted(Register fun, bool isConstructing,
                                   Label* label);
 
diff --git a/js/src/vm/JSFunction.h b/js/src/vm/JSFunction.h
index 9b4583660807..26b07d5e31c8 100644
--- a/js/src/vm/JSFunction.h
+++ b/js/src/vm/JSFunction.h
@@ -340,7 +340,9 @@ class JSFunction : public js::NativeObject {
     return nonLazyScript()->hasJitScript();
   }
 
-  bool hasJitEntry() const { return hasScript() || isNativeWithJitEntry(); }
+  bool hasJitEntry() const {
+    return hasScript() || isInterpretedLazy() || isNativeWithJitEntry();
+  }
 
   /* Compound attributes: */
   bool isBuiltin() const { return isBuiltinNative() || isSelfHostedBuiltin(); }