author    | Lorry Tar Creator <lorry-tar-importer@lorry> | 2016-04-10 09:28:39 +0000
committer | Lorry Tar Creator <lorry-tar-importer@lorry> | 2016-04-10 09:28:39 +0000
commit    | 32761a6cee1d0dee366b885b7b9c777e67885688 (patch)
tree      | d6bec92bebfb216f4126356e55518842c2f476a1 /Source/JavaScriptCore/jit/JITOpcodes.cpp
parent    | a4e969f4965059196ca948db781e52f7cfebf19e (diff)
download  | WebKitGtk-tarball-32761a6cee1d0dee366b885b7b9c777e67885688.tar.gz
webkitgtk-2.4.11
Diffstat (limited to 'Source/JavaScriptCore/jit/JITOpcodes.cpp')
-rw-r--r-- | Source/JavaScriptCore/jit/JITOpcodes.cpp | 1100 |
1 file changed, 445 insertions, 655 deletions
diff --git a/Source/JavaScriptCore/jit/JITOpcodes.cpp b/Source/JavaScriptCore/jit/JITOpcodes.cpp
index 738cb63fe..2bdae1914 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2009, 2012-2016 Apple Inc. All rights reserved.
+ * Copyright (C) 2009, 2012, 2013 Apple Inc. All rights reserved.
  * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
  *
  * Redistribution and use in source and binary forms, with or without
@@ -28,23 +28,18 @@
 #if ENABLE(JIT)
 #include "JIT.h"
 
-#include "BasicBlockLocation.h"
+#include "Arguments.h"
 #include "CopiedSpaceInlines.h"
 #include "Debugger.h"
-#include "Exception.h"
 #include "Heap.h"
 #include "JITInlines.h"
 #include "JSArray.h"
 #include "JSCell.h"
 #include "JSFunction.h"
-#include "JSPropertyNameEnumerator.h"
+#include "JSPropertyNameIterator.h"
 #include "LinkBuffer.h"
-#include "MaxFrameExtentForSlowPathCall.h"
 #include "SlowPathCall.h"
-#include "TypeLocation.h"
-#include "TypeProfilerLog.h"
 #include "VirtualRegister.h"
-#include "Watchdog.h"
 
 namespace JSC {
 
@@ -64,13 +59,21 @@ void JIT::emit_op_mov(Instruction* currentInstruction)
     emitPutVirtualRegister(dst);
 }
 
+void JIT::emit_op_captured_mov(Instruction* currentInstruction)
+{
+    int dst = currentInstruction[1].u.operand;
+    int src = currentInstruction[2].u.operand;
+
+    emitGetVirtualRegister(src, regT0);
+    emitNotifyWrite(regT0, regT1, currentInstruction[3].u.watchpointSet);
+    emitPutVirtualRegister(dst);
+}
+
 void JIT::emit_op_end(Instruction* currentInstruction)
 {
     RELEASE_ASSERT(returnValueGPR != callFrameRegister);
     emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
-    emitRestoreCalleeSaves();
-    emitFunctionEpilogue();
+    restoreReturnAddressBeforeReturn(Address(callFrameRegister, CallFrame::returnPCOffset()));
     ret();
 }
 
@@ -104,29 +107,18 @@ void JIT::emitSlow_op_new_object(Instruction* currentInstruction, Vector<SlowCas
     emitStoreCell(dst, returnValueGPR);
 }
 
-void JIT::emit_op_overrides_has_instance(Instruction* currentInstruction)
+void JIT::emit_op_check_has_instance(Instruction* currentInstruction)
 {
-    int dst = currentInstruction[1].u.operand;
-    int constructor = currentInstruction[2].u.operand;
-    int hasInstanceValue = currentInstruction[3].u.operand;
-
-    emitGetVirtualRegister(hasInstanceValue, regT0);
+    int baseVal = currentInstruction[3].u.operand;
 
-    // We don't jump if we know what Symbol.hasInstance would do.
-    Jump customhasInstanceValue = branchPtr(NotEqual, regT0, TrustedImmPtr(m_codeBlock->globalObject()->functionProtoHasInstanceSymbolFunction()));
+    emitGetVirtualRegister(baseVal, regT0);
 
-    emitGetVirtualRegister(constructor, regT0);
-
-    // Check that constructor 'ImplementsDefaultHasInstance' i.e. the object is not a C-API user nor a bound function.
-    test8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance), regT0);
-    emitTagBool(regT0);
-    Jump done = jump();
+    // Check that baseVal is a cell.
+    emitJumpSlowCaseIfNotJSCell(regT0, baseVal);
 
-    customhasInstanceValue.link(this);
-    move(TrustedImm32(ValueTrue), regT0);
-
-    done.link(this);
-    emitPutVirtualRegister(dst);
+    // Check that baseVal 'ImplementsHasInstance'.
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT0);
+    addSlowCase(branchTest8(Zero, Address(regT0, Structure::typeInfoFlagsOffset()), TrustedImm32(ImplementsDefaultHasInstance)));
 }
 
 void JIT::emit_op_instanceof(Instruction* currentInstruction)
@@ -140,12 +132,13 @@ void JIT::emit_op_instanceof(Instruction* currentInstruction)
     emitGetVirtualRegister(value, regT2);
     emitGetVirtualRegister(proto, regT1);
 
-    // Check that proto are cells. baseVal must be a cell - this is checked by the get_by_id for Symbol.hasInstance.
+    // Check that proto are cells. baseVal must be a cell - this is checked by op_check_has_instance.
    emitJumpSlowCaseIfNotJSCell(regT2, value);
     emitJumpSlowCaseIfNotJSCell(regT1, proto);
 
     // Check that prototype is an object
-    addSlowCase(emitJumpIfCellNotObject(regT1));
+    loadPtr(Address(regT1, JSCell::structureOffset()), regT3);
+    addSlowCase(emitJumpIfNotObject(regT3));
 
     // Optimistically load the result true, and start looping.
     // Initially, regT1 still contains proto and regT2 still contains value.
@@ -155,7 +148,7 @@ void JIT::emit_op_instanceof(Instruction* currentInstruction)
 
     // Load the prototype of the object in regT2. If this is equal to regT1 - WIN!
     // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
-    emitLoadStructure(regT2, regT2, regT3);
+    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
     load64(Address(regT2, Structure::prototypeOffset()), regT2);
     Jump isInstance = branchPtr(Equal, regT2, regT1);
     emitJumpIfJSCell(regT2).linkTo(loop, this);
@@ -168,12 +161,6 @@
     emitPutVirtualRegister(dst);
 }
 
-void JIT::emit_op_instanceof_custom(Instruction*)
-{
-    // This always goes to slow path since we expect it to be rare.
-    addSlowCase(jump());
-}
-
 void JIT::emit_op_is_undefined(Instruction* currentInstruction)
 {
     int dst = currentInstruction[1].u.operand;
@@ -186,19 +173,19 @@ void JIT::emit_op_is_undefined(Instruction* currentInstruction)
     Jump done = jump();
 
     isCell.link(this);
-    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
+    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT1, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
     move(TrustedImm32(0), regT0);
     Jump notMasqueradesAsUndefined = jump();
 
     isMasqueradesAsUndefined.link(this);
-    emitLoadStructure(regT0, regT1, regT2);
     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
     loadPtr(Address(regT1, Structure::globalObjectOffset()), regT1);
     comparePtr(Equal, regT0, regT1, regT0);
 
     notMasqueradesAsUndefined.link(this);
     done.link(this);
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
     emitPutVirtualRegister(dst);
 }
 
@@ -210,7 +197,7 @@ void JIT::emit_op_is_boolean(Instruction* currentInstruction)
     emitGetVirtualRegister(value, regT0);
     xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), regT0);
     test64(Zero, regT0, TrustedImm32(static_cast<int32_t>(~1)), regT0);
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
     emitPutVirtualRegister(dst);
 }
 
@@ -221,7 +208,7 @@ void JIT::emit_op_is_number(Instruction* currentInstruction)
     emitGetVirtualRegister(value, regT0);
     test64(NonZero, regT0, tagTypeNumberRegister, regT0);
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
     emitPutVirtualRegister(dst);
 }
 
@@ -233,8 +220,9 @@ void JIT::emit_op_is_string(Instruction* currentInstruction)
     emitGetVirtualRegister(value, regT0);
     Jump isNotCell = emitJumpIfNotJSCell(regT0);
 
-    compare8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
-    emitTagBool(regT0);
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
+    compare8(Equal, Address(regT1, Structure::typeInfoTypeOffset()), TrustedImm32(StringType), regT0);
+    emitTagAsBoolImmediate(regT0);
     Jump done = jump();
 
     isNotCell.link(this);
@@ -244,26 +232,48 @@ void JIT::emit_op_is_string(Instruction* currentInstruction)
     emitPutVirtualRegister(dst);
 }
 
-void JIT::emit_op_is_object(Instruction* currentInstruction)
+void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
 {
-    int dst = currentInstruction[1].u.operand;
-    int value = currentInstruction[2].u.operand;
+    int activation = currentInstruction[1].u.operand;
+    Jump activationNotCreated = branchTest64(Zero, addressFor(activation));
+    emitGetVirtualRegister(activation, regT0);
+    callOperation(operationTearOffActivation, regT0);
+    activationNotCreated.link(this);
+}
 
-    emitGetVirtualRegister(value, regT0);
-    Jump isNotCell = emitJumpIfNotJSCell(regT0);
+void JIT::emit_op_tear_off_arguments(Instruction* currentInstruction)
+{
+    int arguments = currentInstruction[1].u.operand;
+    int activation = currentInstruction[2].u.operand;
 
-    compare8(AboveOrEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType), regT0);
-    emitTagBool(regT0);
-    Jump done = jump();
+    Jump argsNotCreated = branchTest64(Zero, Address(callFrameRegister, sizeof(Register) * (unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset())));
+    emitGetVirtualRegister(unmodifiedArgumentsRegister(VirtualRegister(arguments)).offset(), regT0);
+    emitGetVirtualRegister(activation, regT1);
+    callOperation(operationTearOffArguments, regT0, regT1);
+    argsNotCreated.link(this);
+}
 
-    isNotCell.link(this);
-    move(TrustedImm32(ValueFalse), regT0);
+void JIT::emit_op_ret(Instruction* currentInstruction)
+{
+    ASSERT(callFrameRegister != regT1);
+    ASSERT(regT1 != returnValueGPR);
+    ASSERT(returnValueGPR != callFrameRegister);
 
-    done.link(this);
-    emitPutVirtualRegister(dst);
+    // Return the result in %eax.
+    emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
+
+    // Grab the return address.
+    emitGetReturnPCFromCallFrameHeaderPtr(regT1);
+
+    // Restore our caller's "r".
+    emitGetCallerFrameFromCallFrameHeaderPtr(callFrameRegister);
+
+    // Return.
+    restoreReturnAddressBeforeReturn(regT1);
+    ret();
 }
 
-void JIT::emit_op_ret(Instruction* currentInstruction)
+void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
 {
     ASSERT(callFrameRegister != regT1);
     ASSERT(regT1 != returnValueGPR);
@@ -271,10 +281,33 @@
     ASSERT(returnValueGPR != callFrameRegister);
 
     // Return the result in %eax.
     emitGetVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
+    Jump notJSCell = emitJumpIfNotJSCell(returnValueGPR);
+    loadPtr(Address(returnValueGPR, JSCell::structureOffset()), regT2);
+    Jump notObject = emitJumpIfNotObject(regT2);
+
+    // Grab the return address.
+    emitGetReturnPCFromCallFrameHeaderPtr(regT1);
 
-    checkStackPointerAlignment();
-    emitRestoreCalleeSaves();
-    emitFunctionEpilogue();
+    // Restore our caller's "r".
+    emitGetCallerFrameFromCallFrameHeaderPtr(callFrameRegister);
+
+    // Return.
+    restoreReturnAddressBeforeReturn(regT1);
+    ret();
+
+    // Return 'this' in %eax.
+    notJSCell.link(this);
+    notObject.link(this);
+    emitGetVirtualRegister(currentInstruction[2].u.operand, returnValueGPR);
+
+    // Grab the return address.
+    emitGetReturnPCFromCallFrameHeaderPtr(regT1);
+
+    // Restore our caller's "r".
+    emitGetCallerFrameFromCallFrameHeaderPtr(callFrameRegister);
+
+    // Return.
+    restoreReturnAddressBeforeReturn(regT1);
     ret();
 }
 
@@ -286,7 +319,7 @@ void JIT::emit_op_to_primitive(Instruction* currentInstruction)
     emitGetVirtualRegister(src, regT0);
 
     Jump isImm = emitJumpIfNotJSCell(regT0);
-    addSlowCase(emitJumpIfCellObject(regT0));
+    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get())));
     isImm.link(this);
 
     if (dst != src)
@@ -320,7 +353,7 @@ void JIT::emit_op_jfalse(Instruction* currentInstruction)
     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
 
     addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0)))), target);
-    Jump isNonZero = emitJumpIfInt(regT0);
+    Jump isNonZero = emitJumpIfImmediateInteger(regT0);
 
     addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))), target);
     addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))));
@@ -337,8 +370,8 @@ void JIT::emit_op_jeq_null(Instruction* currentInstruction)
     Jump isImmediate = emitJumpIfNotJSCell(regT0);
 
     // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
-    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
-    emitLoadStructure(regT0, regT2, regT1);
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+    Jump isNotMasqueradesAsUndefined = branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
     addJump(branchPtr(Equal, Address(regT2, Structure::globalObjectOffset()), regT0), target);
     Jump masqueradesGlobalObjectIsForeign = jump();
@@ -360,8 +393,8 @@ void JIT::emit_op_jneq_null(Instruction* currentInstruction)
     Jump isImmediate = emitJumpIfNotJSCell(regT0);
 
     // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
-    addJump(branchTest8(Zero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
-    emitLoadStructure(regT0, regT2, regT1);
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+    addJump(branchTest8(Zero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)), target);
     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
     addJump(branchPtr(NotEqual, Address(regT2, Structure::globalObjectOffset()), regT0), target);
     Jump wasNotImmediate = jump();
@@ -387,9 +420,9 @@ void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
 void JIT::emit_op_eq(Instruction* currentInstruction)
 {
     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
-    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
+    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
     compare32(Equal, regT1, regT0, regT0);
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
     emitPutVirtualRegister(currentInstruction[1].u.operand);
 }
 
@@ -399,7 +432,7 @@ void JIT::emit_op_jtrue(Instruction* currentInstruction)
     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
 
     Jump isZero = branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNumber(0))));
-    addJump(emitJumpIfInt(regT0), target);
+    addJump(emitJumpIfImmediateInteger(regT0), target);
 
     addJump(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsBoolean(true)))), target);
     addSlowCase(branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsBoolean(false)))));
@@ -410,27 +443,150 @@ void JIT::emit_op_neq(Instruction* currentInstruction)
 {
     emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
-    emitJumpSlowCaseIfNotInt(regT0, regT1, regT2);
+    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
     compare32(NotEqual, regT1, regT0, regT0);
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
+
+    emitPutVirtualRegister(currentInstruction[1].u.operand);
+
+}
+void JIT::emit_op_bitxor(Instruction* currentInstruction)
+{
+    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
+    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
+    xor64(regT1, regT0);
+    emitFastArithReTagImmediate(regT0, regT0);
     emitPutVirtualRegister(currentInstruction[1].u.operand);
+}
+void JIT::emit_op_bitor(Instruction* currentInstruction)
+{
+    emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
+    emitJumpSlowCaseIfNotImmediateIntegers(regT0, regT1, regT2);
+    or64(regT1, regT0);
+    emitPutVirtualRegister(currentInstruction[1].u.operand);
 }
 
 void JIT::emit_op_throw(Instruction* currentInstruction)
 {
     ASSERT(regT0 == returnValueGPR);
-    copyCalleeSavesToVMCalleeSavesBuffer();
     emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
     callOperationNoExceptionCheck(operationThrow, regT0);
     jumpToExceptionHandler();
 }
 
+void JIT::emit_op_get_pnames(Instruction* currentInstruction)
+{
+    int dst = currentInstruction[1].u.operand;
+    int base = currentInstruction[2].u.operand;
+    int i = currentInstruction[3].u.operand;
+    int size = currentInstruction[4].u.operand;
+    int breakTarget = currentInstruction[5].u.operand;
+
+    JumpList isNotObject;
+
+    emitGetVirtualRegister(base, regT0);
+    if (!m_codeBlock->isKnownNotImmediate(base))
+        isNotObject.append(emitJumpIfNotJSCell(regT0));
+    if (base != m_codeBlock->thisRegister().offset() || m_codeBlock->isStrictMode()) {
+        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+        isNotObject.append(emitJumpIfNotObject(regT2));
+    }
+
+    // We could inline the case where you have a valid cache, but
+    // this call doesn't seem to be hot.
+    Label isObject(this);
+    callOperation(operationGetPNames, regT0);
+    emitStoreCell(dst, returnValueGPR);
+    load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
+    store64(tagTypeNumberRegister, addressFor(i));
+    store32(TrustedImm32(Int32Tag), intTagFor(size));
+    store32(regT3, intPayloadFor(size));
+    Jump end = jump();
+
+    isNotObject.link(this);
+    move(regT0, regT1);
+    and32(TrustedImm32(~TagBitUndefined), regT1);
+    addJump(branch32(Equal, regT1, TrustedImm32(ValueNull)), breakTarget);
+    callOperation(operationToObject, base, regT0);
+    jump().linkTo(isObject, this);
+
+    end.link(this);
+}
+
+void JIT::emit_op_next_pname(Instruction* currentInstruction)
+{
+    int dst = currentInstruction[1].u.operand;
+    int base = currentInstruction[2].u.operand;
+    int i = currentInstruction[3].u.operand;
+    int size = currentInstruction[4].u.operand;
+    int it = currentInstruction[5].u.operand;
+    int target = currentInstruction[6].u.operand;
+
+    JumpList callHasProperty;
+
+    Label begin(this);
+    load32(intPayloadFor(i), regT0);
+    Jump end = branch32(Equal, regT0, intPayloadFor(size));
+
+    // Grab key @ i
+    loadPtr(addressFor(it), regT1);
+    loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
+
+    load64(BaseIndex(regT2, regT0, TimesEight), regT2);
+
+    emitPutVirtualRegister(dst, regT2);
+
+    // Increment i
+    add32(TrustedImm32(1), regT0);
+    store32(regT0, intPayloadFor(i));
+
+    // Verify that i is valid:
+    emitGetVirtualRegister(base, regT0);
+
+    // Test base's structure
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+    callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
+
+    // Test base's prototype chain
+    loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
+    loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
+    addJump(branchTestPtr(Zero, Address(regT3)), target);
+
+    Label checkPrototype(this);
+    load64(Address(regT2, Structure::prototypeOffset()), regT2);
+    callHasProperty.append(emitJumpIfNotJSCell(regT2));
+    loadPtr(Address(regT2, JSCell::structureOffset()), regT2);
+    callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
+    addPtr(TrustedImm32(sizeof(Structure*)), regT3);
+    branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
+
+    // Continue loop.
+    addJump(jump(), target);
+
+    // Slow case: Ask the object if i is valid.
+    callHasProperty.link(this);
+    emitGetVirtualRegister(dst, regT1);
+    callOperation(operationHasProperty, regT0, regT1);
+
+    // Test for valid key.
+    addJump(branchTest32(NonZero, regT0), target);
+    jump().linkTo(begin, this);
+
+    // End of loop.
+    end.link(this);
+}
+
 void JIT::emit_op_push_with_scope(Instruction* currentInstruction)
 {
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_push_with_scope);
-    slowPathCall.call();
+    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
+    callOperation(operationPushWithScope, regT0);
+}
+
+void JIT::emit_op_pop_scope(Instruction*)
+{
+    callOperation(operationPopScope);
 }
 
 void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
@@ -448,18 +604,18 @@
 
     // Jump slow if either is a double. First test if it's an integer, which is fine, and then test
     // if it's a double.
-    Jump leftOK = emitJumpIfInt(regT0);
-    addSlowCase(emitJumpIfNumber(regT0));
+    Jump leftOK = emitJumpIfImmediateInteger(regT0);
+    addSlowCase(emitJumpIfImmediateNumber(regT0));
     leftOK.link(this);
-    Jump rightOK = emitJumpIfInt(regT1);
-    addSlowCase(emitJumpIfNumber(regT1));
+    Jump rightOK = emitJumpIfImmediateInteger(regT1);
+    addSlowCase(emitJumpIfImmediateNumber(regT1));
     rightOK.link(this);
 
     if (type == OpStrictEq)
         compare64(Equal, regT1, regT0, regT0);
     else
         compare64(NotEqual, regT1, regT0, regT0);
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
 
     emitPutVirtualRegister(dst);
 }
@@ -479,64 +635,24 @@ void JIT::emit_op_to_number(Instruction* currentInstruction)
     int srcVReg = currentInstruction[2].u.operand;
     emitGetVirtualRegister(srcVReg, regT0);
 
-    addSlowCase(emitJumpIfNotNumber(regT0));
+    addSlowCase(emitJumpIfNotImmediateNumber(regT0));
 
     emitPutVirtualRegister(currentInstruction[1].u.operand);
 }
 
-void JIT::emit_op_to_string(Instruction* currentInstruction)
+void JIT::emit_op_push_name_scope(Instruction* currentInstruction)
 {
-    int srcVReg = currentInstruction[2].u.operand;
-    emitGetVirtualRegister(srcVReg, regT0);
-
-    addSlowCase(emitJumpIfNotJSCell(regT0));
-    addSlowCase(branch8(NotEqual, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
-
-    emitPutVirtualRegister(currentInstruction[1].u.operand);
+    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
+    callOperation(operationPushNameScope, &m_codeBlock->identifier(currentInstruction[1].u.operand), regT0, currentInstruction[3].u.operand);
 }
 
 void JIT::emit_op_catch(Instruction* currentInstruction)
 {
-    restoreCalleeSavesFromVMCalleeSavesBuffer();
-
-    move(TrustedImmPtr(m_vm), regT3);
-    load64(Address(regT3, VM::callFrameForCatchOffset()), callFrameRegister);
-    storePtr(TrustedImmPtr(nullptr), Address(regT3, VM::callFrameForCatchOffset()));
-
-    addPtr(TrustedImm32(stackPointerOffsetFor(codeBlock()) * sizeof(Register)), callFrameRegister, stackPointerRegister);
-
-    callOperationNoExceptionCheck(operationCheckIfExceptionIsUncatchableAndNotifyProfiler);
-    Jump isCatchableException = branchTest32(Zero, returnValueGPR);
-    jumpToExceptionHandler();
-    isCatchableException.link(this);
-
     move(TrustedImmPtr(m_vm), regT3);
+    load64(Address(regT3, VM::callFrameForThrowOffset()), callFrameRegister);
     load64(Address(regT3, VM::exceptionOffset()), regT0);
     store64(TrustedImm64(JSValue::encode(JSValue())), Address(regT3, VM::exceptionOffset()));
     emitPutVirtualRegister(currentInstruction[1].u.operand);
-
-    load64(Address(regT0, Exception::valueOffset()), regT0);
-    emitPutVirtualRegister(currentInstruction[2].u.operand);
-}
-
-void JIT::emit_op_assert(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_assert);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_create_lexical_environment(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_lexical_environment);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_get_parent_scope(Instruction* currentInstruction)
-{
-    int currentScope = currentInstruction[2].u.operand;
-    emitGetVirtualRegister(currentScope, regT0);
-    loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
-    emitStoreCell(currentInstruction[1].u.operand, regT0);
 }
 
 void JIT::emit_op_switch_imm(Instruction* currentInstruction)
@@ -548,7 +664,7 @@
 
     // create jump table for switch destinations, track this switch statement.
     SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Immediate));
-    jumpTable->ensureCTITable();
+    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
 
     emitGetVirtualRegister(scrutinee, regT0);
     callOperation(operationSwitchImmWithUnknownKeyType, regT0, tableIndex);
@@ -564,7 +680,7 @@ void JIT::emit_op_switch_char(Instruction* currentInstruction)
 
     // create jump table for switch destinations, track this switch statement.
     SimpleJumpTable* jumpTable = &m_codeBlock->switchJumpTable(tableIndex);
     m_switches.append(SwitchRecord(jumpTable, m_bytecodeOffset, defaultOffset, SwitchRecord::Character));
-    jumpTable->ensureCTITable();
+    jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
 
     emitGetVirtualRegister(scrutinee, regT0);
     callOperation(operationSwitchCharWithUnknownKeyType, regT0, tableIndex);
@@ -608,12 +724,12 @@ void JIT::emit_op_eq_null(Instruction* currentInstruction)
     emitGetVirtualRegister(src1, regT0);
     Jump isImmediate = emitJumpIfNotJSCell(regT0);
 
-    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
     move(TrustedImm32(0), regT0);
     Jump wasNotMasqueradesAsUndefined = jump();
 
     isMasqueradesAsUndefined.link(this);
-    emitLoadStructure(regT0, regT2, regT1);
     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
     loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
     comparePtr(Equal, regT0, regT2, regT0);
@@ -627,7 +743,7 @@
 
     wasNotImmediate.link(this);
     wasNotMasqueradesAsUndefined.link(this);
 
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
     emitPutVirtualRegister(dst);
 }
 
@@ -640,12 +756,12 @@ void JIT::emit_op_neq_null(Instruction* currentInstruction)
     emitGetVirtualRegister(src1, regT0);
     Jump isImmediate = emitJumpIfNotJSCell(regT0);
 
-    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT0, JSCell::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
+    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
+    Jump isMasqueradesAsUndefined = branchTest8(NonZero, Address(regT2, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));
     move(TrustedImm32(1), regT0);
     Jump wasNotMasqueradesAsUndefined = jump();
 
     isMasqueradesAsUndefined.link(this);
-    emitLoadStructure(regT0, regT2, regT1);
     move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
     loadPtr(Address(regT2, Structure::globalObjectOffset()), regT2);
     comparePtr(NotEqual, regT0, regT2, regT0);
@@ -659,30 +775,53 @@
 
     wasNotImmediate.link(this);
     wasNotMasqueradesAsUndefined.link(this);
 
-    emitTagBool(regT0);
+    emitTagAsBoolImmediate(regT0);
     emitPutVirtualRegister(dst);
 }
 
-void JIT::emit_op_enter(Instruction*)
+void JIT::emit_op_enter(Instruction* currentInstruction)
 {
+    emitEnterOptimizationCheck();
+
     // Even though CTI doesn't use them, we initialize our constant
     // registers to zap stale pointers, to avoid unnecessarily prolonging
     // object lifetime and increasing GC pressure.
     size_t count = m_codeBlock->m_numVars;
-    for (size_t j = CodeBlock::llintBaselineCalleeSaveSpaceAsVirtualRegisters(); j < count; ++j)
+    for (size_t j = 0; j < count; ++j)
         emitInitRegister(virtualRegisterForLocal(j).offset());
 
-    emitWriteBarrier(m_codeBlock);
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_enter);
+    slowPathCall.call();
+}
 
-    emitEnterOptimizationCheck();
+void JIT::emit_op_create_activation(Instruction* currentInstruction)
+{
+    int dst = currentInstruction[1].u.operand;
+
+    Jump activationCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
+    callOperation(operationCreateActivation, 0);
+    emitStoreCell(dst, returnValueGPR);
+    activationCreated.link(this);
 }
 
-void JIT::emit_op_get_scope(Instruction* currentInstruction)
+void JIT::emit_op_create_arguments(Instruction* currentInstruction)
 {
     int dst = currentInstruction[1].u.operand;
-    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
-    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT0);
-    emitStoreCell(dst, regT0);
+
+    Jump argsCreated = branchTest64(NonZero, Address(callFrameRegister, sizeof(Register) * dst));
+
+    callOperation(operationCreateArguments);
+    emitStoreCell(dst, returnValueGPR);
+    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(dst)), returnValueGPR);
+
+    argsCreated.link(this);
+}
+
+void JIT::emit_op_init_lazy_reg(Instruction* currentInstruction)
+{
+    int dst = currentInstruction[1].u.operand;
+
+    store64(TrustedImm64((int64_t)0), Address(callFrameRegister, sizeof(Register) * dst));
 }
 
 void JIT::emit_op_to_this(Instruction* currentInstruction)
@@ -691,66 +830,60 @@
 
     emitGetVirtualRegister(currentInstruction[1].u.operand, regT1);
 
     emitJumpSlowCaseIfNotJSCell(regT1);
+    loadPtr(Address(regT1, JSCell::structureOffset()), regT0);
 
-    addSlowCase(branch8(NotEqual, Address(regT1, JSCell::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
+    addSlowCase(branch8(NotEqual, Address(regT0, Structure::typeInfoTypeOffset()), TrustedImm32(FinalObjectType)));
     loadPtr(cachedStructure, regT2);
-    addSlowCase(branchTestPtr(Zero, regT2));
-    load32(Address(regT2, Structure::structureIDOffset()), regT2);
-    addSlowCase(branch32(NotEqual, Address(regT1, JSCell::structureIDOffset()), regT2));
+    addSlowCase(branchPtr(NotEqual, regT0, regT2));
+}
+
+void JIT::emit_op_get_callee(Instruction* currentInstruction)
+{
+    int result = currentInstruction[1].u.operand;
+    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[2].u.jsCell;
+    emitGetFromCallFrameHeaderPtr(JSStack::Callee, regT0);
+
+    loadPtr(cachedFunction, regT2);
+    addSlowCase(branchPtr(NotEqual, regT0, regT2));
+
+    emitPutVirtualRegister(result);
+}
+
+void JIT::emitSlow_op_get_callee(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+    linkSlowCase(iter);
+
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_callee);
+    slowPathCall.call();
 }
 
 void JIT::emit_op_create_this(Instruction* currentInstruction)
 {
     int callee = currentInstruction[2].u.operand;
-    WriteBarrierBase<JSCell>* cachedFunction = &currentInstruction[4].u.jsCell;
     RegisterID calleeReg = regT0;
-    RegisterID rareDataReg = regT4;
     RegisterID resultReg = regT0;
     RegisterID allocatorReg = regT1;
     RegisterID structureReg = regT2;
-    RegisterID cachedFunctionReg = regT4;
     RegisterID scratchReg = regT3;
 
     emitGetVirtualRegister(callee, calleeReg);
-    loadPtr(Address(calleeReg, JSFunction::offsetOfRareData()), rareDataReg);
-    addSlowCase(branchTestPtr(Zero, rareDataReg));
-    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
-    loadPtr(Address(rareDataReg, FunctionRareData::offsetOfObjectAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
+    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorReg);
+    loadPtr(Address(calleeReg, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureReg);
    addSlowCase(branchTestPtr(Zero, allocatorReg));
 
-    loadPtr(cachedFunction, cachedFunctionReg);
-    Jump hasSeenMultipleCallees = branchPtr(Equal, cachedFunctionReg, TrustedImmPtr(JSCell::seenMultipleCalleeObjects()));
-    addSlowCase(branchPtr(NotEqual, calleeReg, cachedFunctionReg));
-    hasSeenMultipleCallees.link(this);
-
     emitAllocateJSObject(allocatorReg, structureReg, resultReg, scratchReg);
     emitPutVirtualRegister(currentInstruction[1].u.operand);
 }
 
 void JIT::emitSlow_op_create_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 {
-    linkSlowCase(iter); // doesn't have rare data
     linkSlowCase(iter); // doesn't have an allocation profile
     linkSlowCase(iter); // allocation failed
-    linkSlowCase(iter); // cached function didn't match
 
     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_this);
     slowPathCall.call();
 }
 
-void JIT::emit_op_check_tdz(Instruction* currentInstruction)
-{
-    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
-    addSlowCase(branchTest64(Zero, regT0));
-}
-
-void JIT::emitSlow_op_check_tdz(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
-    linkSlowCase(iter);
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_throw_tdz_error);
-    slowPathCall.call();
-}
-
 void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
 {
     Jump profilerDone = branchTestPtr(Zero, AbsoluteAddress(m_vm->enabledProfilerAddress()));
@@ -775,7 +908,6 @@ void JIT::emitSlow_op_to_this(Instruction* currentInstruction, Vector<SlowCaseEn
     linkSlowCase(iter);
     linkSlowCase(iter);
     linkSlowCase(iter);
-    linkSlowCase(iter);
 
     JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_this);
     slowPathCall.call();
@@ -811,11 +943,25 @@ void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntr
     emitJumpSlowToHot(branchTest32(NonZero, returnValueGPR), currentInstruction[2].u.operand);
 }
 
+void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+    linkSlowCase(iter);
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitxor);
+    slowPathCall.call();
+}
+
+void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+    linkSlowCase(iter);
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_bitor);
+    slowPathCall.call();
+}
+
 void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 {
     linkSlowCase(iter);
     callOperation(operationCompareEq, regT0, regT1);
-    emitTagBool(returnValueGPR);
+    emitTagAsBoolImmediate(returnValueGPR);
     emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
 }
 
@@ -824,7 +970,7 @@ void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>
     linkSlowCase(iter);
     callOperation(operationCompareEq, regT0, regT1);
     xor32(TrustedImm32(0x1), regT0);
-    emitTagBool(returnValueGPR);
+    emitTagAsBoolImmediate(returnValueGPR);
     emitPutVirtualRegister(currentInstruction[1].u.operand, returnValueGPR);
 }
 
@@ -846,34 +992,33 @@ void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCase
     slowPathCall.call();
 }
 
-void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+void JIT::emitSlow_op_check_has_instance(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 {
     int dst = currentInstruction[1].u.operand;
     int value = currentInstruction[2].u.operand;
-    int proto = currentInstruction[3].u.operand;
+    int baseVal = currentInstruction[3].u.operand;
 
-    linkSlowCaseIfNotJSCell(iter, value);
-    linkSlowCaseIfNotJSCell(iter, proto);
+    linkSlowCaseIfNotJSCell(iter, baseVal);
     linkSlowCase(iter);
     emitGetVirtualRegister(value, regT0);
-    emitGetVirtualRegister(proto, regT1);
-    callOperation(operationInstanceOf, dst, regT0, regT1);
+    emitGetVirtualRegister(baseVal, regT1);
+    callOperation(operationCheckHasInstance, dst, regT0, regT1);
+
+    emitJumpSlowToHot(jump(), currentInstruction[4].u.operand);
 }
 
-void JIT::emitSlow_op_instanceof_custom(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 {
     int dst = currentInstruction[1].u.operand;
     int value = currentInstruction[2].u.operand;
-    int constructor = currentInstruction[3].u.operand;
-    int hasInstanceValue = currentInstruction[4].u.operand;
+    int proto = currentInstruction[3].u.operand;
 
+    linkSlowCaseIfNotJSCell(iter, value);
+    linkSlowCaseIfNotJSCell(iter, proto);
     linkSlowCase(iter);
     emitGetVirtualRegister(value, regT0);
-    emitGetVirtualRegister(constructor, regT1);
-    emitGetVirtualRegister(hasInstanceValue, regT2);
-    callOperation(operationInstanceOfCustom, regT0, regT1, regT2);
-    emitTagBool(returnValueGPR);
-    emitPutVirtualRegister(dst, returnValueGPR);
+    emitGetVirtualRegister(proto, regT1);
+    callOperation(operationInstanceOf, dst, regT0, regT1);
 }
 
 void JIT::emitSlow_op_to_number(Instruction* currentInstruction, Vector<SlowCase
@@ -884,63 +1029,125 @@
     slowPathCall.call();
 }
 
-void JIT::emitSlow_op_to_string(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+void JIT::emit_op_get_arguments_length(Instruction* currentInstruction)
 {
-    linkSlowCase(iter); // Not JSCell.
-    linkSlowCase(iter); // Not JSString.
+    int dst = currentInstruction[1].u.operand;
+    int argumentsRegister = currentInstruction[2].u.operand;
+    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
+    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT0);
+    sub32(TrustedImm32(1), regT0);
+    emitFastArithReTagImmediate(regT0, regT0);
+    emitPutVirtualRegister(dst, regT0);
+}
 
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_string);
-    slowPathCall.call();
+void JIT::emitSlow_op_get_arguments_length(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+    linkSlowCase(iter);
+    int dst = currentInstruction[1].u.operand;
+    int base = currentInstruction[2].u.operand;
+    callOperation(operationGetArgumentsLength, dst, base);
+}
+
+void JIT::emit_op_get_argument_by_val(Instruction* currentInstruction)
+{
+    int dst = currentInstruction[1].u.operand;
+    int argumentsRegister = currentInstruction[2].u.operand;
+    int property = currentInstruction[3].u.operand;
+    addSlowCase(branchTest64(NonZero, addressFor(argumentsRegister)));
+    emitGetVirtualRegister(property, regT1);
+    addSlowCase(emitJumpIfNotImmediateInteger(regT1));
+    add32(TrustedImm32(1), regT1);
+    // regT1 now contains the integer index of the argument we want, including this
+    emitGetFromCallFrameHeader32(JSStack::ArgumentCount, regT2);
+    addSlowCase(branch32(AboveOrEqual, regT1, regT2));
+
+    signExtend32ToPtr(regT1, regT1);
+    load64(BaseIndex(callFrameRegister, regT1, TimesEight, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))), regT0);
+    emitValueProfilingSite();
+    emitPutVirtualRegister(dst, regT0);
+}
+
+void JIT::emitSlow_op_get_argument_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+    int dst = currentInstruction[1].u.operand;
+    int arguments = currentInstruction[2].u.operand;
+    int property = currentInstruction[3].u.operand;
+
+    linkSlowCase(iter);
+    Jump skipArgumentsCreation = jump();
+
+    linkSlowCase(iter);
+    linkSlowCase(iter);
+    callOperation(operationCreateArguments);
+    emitStoreCell(arguments, returnValueGPR);
+    emitStoreCell(unmodifiedArgumentsRegister(VirtualRegister(arguments)), returnValueGPR);
+
+    skipArgumentsCreation.link(this);
+    emitGetVirtualRegister(arguments, regT0);
+    emitGetVirtualRegister(property, regT1);
+    callOperation(WithProfile, operationGetByValGeneric, dst, regT0, regT1);
 }
 
 #endif // USE(JSVALUE64)
 
+void JIT::emit_op_touch_entry(Instruction* currentInstruction)
+{
+    if (m_codeBlock->symbolTable()->m_functionEnteredOnce.hasBeenInvalidated())
+        return;
+
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_touch_entry);
+    slowPathCall.call();
+}
+
 void JIT::emit_op_loop_hint(Instruction*)
 {
     // Emit the JIT optimization check:
     if (canBeOptimized()) {
-        addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
-            AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
+        if (Options::enableOSREntryInLoops()) {
+            addSlowCase(branchAdd32(PositiveOrZero, TrustedImm32(Options::executionCounterIncrementForLoop()),
+                AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
+        } else {
+            // Add with saturation.
+            move(TrustedImmPtr(m_codeBlock->addressOfJITExecuteCounter()), regT3);
+            load32(regT3, regT2);
+            Jump dontAdd = branch32(
+                GreaterThan, regT2,
+                TrustedImm32(std::numeric_limits<int32_t>::max() - Options::executionCounterIncrementForLoop()));
+            add32(TrustedImm32(Options::executionCounterIncrementForLoop()), regT2);
+            store32(regT2, regT3);
+            dontAdd.link(this);
+        }
     }
+
+    // Emit the watchdog timer check:
+    if (m_vm->watchdog.isEnabled())
+        addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog.timerDidFireAddress())));
 }
 
 void JIT::emitSlow_op_loop_hint(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
 {
 #if ENABLE(DFG_JIT)
     // Emit the slow path for the JIT optimization check:
-    if (canBeOptimized()) {
+    if (canBeOptimized() && Options::enableOSREntryInLoops()) {
         linkSlowCase(iter);
-
-        copyCalleeSavesFromFrameOrRegisterToVMCalleeSavesBuffer();
-
+        callOperation(operationOptimize, m_bytecodeOffset);
         Jump noOptimizedEntry = branchTestPtr(Zero, returnValueGPR);
-        if (!ASSERT_DISABLED) {
-            Jump ok = branchPtr(MacroAssembler::Above, returnValueGPR, TrustedImmPtr(bitwise_cast<void*>(static_cast<intptr_t>(1000))));
-            abortWithReason(JITUnreasonableLoopHintJumpTarget);
-            ok.link(this);
-        }
         jump(returnValueGPR);
         noOptimizedEntry.link(this);
 
         emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
     }
-#else
-    UNUSED_PARAM(iter);
 #endif
-}
 
-void JIT::emit_op_watchdog(Instruction*)
-{
-    ASSERT(m_vm->watchdog());
-    addSlowCase(branchTest8(NonZero, AbsoluteAddress(m_vm->watchdog()->timerDidFireAddress())));
-}
+    // Emit the slow path of the watchdog timer check:
+    if (m_vm->watchdog.isEnabled()) {
+        linkSlowCase(iter);
+        callOperation(operationHandleWatchdogTimer);
+
+        emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_loop_hint));
+    }
 
-void JIT::emitSlow_op_watchdog(Instruction*, Vector<SlowCaseEntry>::iterator& iter)
-{
-    ASSERT(m_vm->watchdog());
-    linkSlowCase(iter);
-    callOperation(operationHandleWatchdogTimer);
 }
 
 void JIT::emit_op_new_regexp(Instruction* currentInstruction)
@@ -948,81 +1155,38 @@
 {
     callOperation(operationNewRegexp, currentInstruction[1].u.operand, m_codeBlock->regexp(currentInstruction[2].u.operand));
 }
 
-void JIT::emitNewFuncCommon(Instruction* currentInstruction)
+void JIT::emit_op_new_func(Instruction* currentInstruction)
 {
     Jump lazyJump;
     int dst = currentInstruction[1].u.operand;
-
-#if USE(JSVALUE64)
-    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
+    if (currentInstruction[3].u.operand) {
+#if USE(JSVALUE32_64)
+        lazyJump = branch32(NotEqual, tagFor(dst), TrustedImm32(JSValue::EmptyValueTag));
 #else
-    emitLoadPayload(currentInstruction[2].u.operand, regT0);
+        lazyJump = branchTest64(NonZero, addressFor(dst));
 #endif
-    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[3].u.operand);
-
-    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
-    if (opcodeID == op_new_func)
-        callOperation(operationNewFunction, dst, regT0, funcExec);
-    else {
-        ASSERT(opcodeID == op_new_generator_func);
-        callOperation(operationNewGeneratorFunction, dst, regT0, funcExec);
     }
-}
 
-void JIT::emit_op_new_func(Instruction* currentInstruction)
-{
-    emitNewFuncCommon(currentInstruction);
-}
+    FunctionExecutable* funcExec = m_codeBlock->functionDecl(currentInstruction[2].u.operand);
+    callOperation(operationNewFunction, dst, funcExec);
 
-void JIT::emit_op_new_generator_func(Instruction* currentInstruction)
-{
-    emitNewFuncCommon(currentInstruction);
+    if (currentInstruction[3].u.operand)
+        lazyJump.link(this);
 }
 
-void JIT::emitNewFuncExprCommon(Instruction* currentInstruction)
+void JIT::emit_op_new_captured_func(Instruction* currentInstruction)
 {
-    Jump notUndefinedScope;
-    int dst = currentInstruction[1].u.operand;
-#if USE(JSVALUE64)
-    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
-    notUndefinedScope = branch64(NotEqual, regT0, TrustedImm64(JSValue::encode(jsUndefined())));
-    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, sizeof(Register) * dst));
-#else
-    emitLoadPayload(currentInstruction[2].u.operand, regT0);
-    notUndefinedScope = branch32(NotEqual, tagFor(currentInstruction[2].u.operand), TrustedImm32(JSValue::UndefinedTag));
-    emitStore(dst, jsUndefined());
-#endif
-    Jump done = jump();
-    notUndefinedScope.link(this);
-
-    FunctionExecutable* function = m_codeBlock->functionExpr(currentInstruction[3].u.operand);
-    OpcodeID opcodeID = m_vm->interpreter->getOpcodeID(currentInstruction->u.opcode);
-
-    if (opcodeID == op_new_func_exp || opcodeID == op_new_arrow_func_exp)
-        callOperation(operationNewFunction, dst, regT0, function);
-    else {
-        ASSERT(opcodeID == op_new_generator_func_exp);
-        callOperation(operationNewGeneratorFunction, dst, regT0, function);
-    }
-
-    done.link(this);
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_new_captured_func);
+    slowPathCall.call();
 }
 
 void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
 {
-    emitNewFuncExprCommon(currentInstruction);
-}
-
-void JIT::emit_op_new_generator_func_exp(Instruction* currentInstruction)
-{
-    emitNewFuncExprCommon(currentInstruction);
+    int dst = currentInstruction[1].u.operand;
+    FunctionExecutable* funcExpr = m_codeBlock->functionExpr(currentInstruction[2].u.operand);
+    callOperation(operationNewFunction, dst, funcExpr);
 }
 
-void JIT::emit_op_new_arrow_func_exp(Instruction* currentInstruction)
-{
-    emitNewFuncExprCommon(currentInstruction);
-}
-
 void JIT::emit_op_new_array(Instruction* currentInstruction)
 {
     int dst = currentInstruction[1].u.operand;
@@ -1057,387 +1221,13 @@ void JIT::emit_op_new_array_buffer(Instruction* currentInstruction)
     callOperation(operationNewArrayBufferWithProfile, dst, currentInstruction[4].u.arrayAllocationProfile, values, size);
 }
 
-#if USE(JSVALUE64)
-void JIT::emit_op_has_structure_property(Instruction* currentInstruction)
-{
-    int dst = currentInstruction[1].u.operand;
-    int base = currentInstruction[2].u.operand;
-    int enumerator = currentInstruction[4].u.operand;
-
-    emitGetVirtualRegister(base, regT0);
-    emitGetVirtualRegister(enumerator, regT1);
-    emitJumpSlowCaseIfNotJSCell(regT0, base);
-
-    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
-    addSlowCase(branch32(NotEqual, regT0, Address(regT1, JSPropertyNameEnumerator::cachedStructureIDOffset())));
-
-    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
-    emitPutVirtualRegister(dst);
-}
-
-void JIT::privateCompileHasIndexedProperty(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
-{
-    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
-
-    PatchableJump badType;
-
-    // FIXME: Add support for other types like TypedArrays and Arguments.
-    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
-    JumpList slowCases = emitLoadForArrayMode(currentInstruction, arrayMode, badType);
-    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
-    Jump done = jump();
-
-    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
-
-    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
-    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
-
-    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
-
-    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
-        m_codeBlock, patchBuffer,
-        ("Baseline has_indexed_property stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
-
-    MacroAssembler::repatchJump(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
-    MacroAssembler::repatchCall(CodeLocationCall(MacroAssemblerCodePtr(returnAddress)), FunctionPtr(operationHasIndexedPropertyGeneric));
-}
-
-void JIT::emit_op_has_indexed_property(Instruction* currentInstruction)
-{
-    int dst = currentInstruction[1].u.operand;
-    int base = currentInstruction[2].u.operand;
-    int property = currentInstruction[3].u.operand;
-    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
-    ByValInfo* byValInfo = m_codeBlock->addByValInfo();
-
-    emitGetVirtualRegisters(base, regT0, property, regT1);
-
-    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
-    // We check the value as if it was a uint32 against the m_vectorLength - which will always fail if
-    // number was signed since m_vectorLength is always less than intmax (since the total allocation
-    // size is always less than 4Gb). As such zero extending will have been correct (and extending the value
-    // to 64-bits is necessary since it's used in the address calculation. We zero extend rather than sign
-    // extending since it makes it easier to re-tag the value in the slow case.
-    zeroExtend32ToPtr(regT1, regT1);
-
-    emitJumpSlowCaseIfNotJSCell(regT0, base);
-    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
-    and32(TrustedImm32(IndexingShapeMask), regT2);
-
-    JITArrayMode mode = chooseArrayMode(profile);
-    PatchableJump badType;
-
-    // FIXME: Add support for other types like TypedArrays and Arguments.
-    // See https://bugs.webkit.org/show_bug.cgi?id=135033 and https://bugs.webkit.org/show_bug.cgi?id=135034.
-    JumpList slowCases = emitLoadForArrayMode(currentInstruction, mode, badType);
-
-    move(TrustedImm64(JSValue::encode(jsBoolean(true))), regT0);
-
-    addSlowCase(badType);
-    addSlowCase(slowCases);
-
-    Label done = label();
-
-    emitPutVirtualRegister(dst);
-
-    Label nextHotPath = label();
-
-    m_byValCompilationInfo.append(ByValCompilationInfo(byValInfo, m_bytecodeOffset, PatchableJump(), badType, mode, profile, done, nextHotPath));
-}
-
-void JIT::emitSlow_op_has_indexed_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
-    int dst = currentInstruction[1].u.operand;
-    int base = currentInstruction[2].u.operand;
-    int property = currentInstruction[3].u.operand;
-    ByValInfo* byValInfo = m_byValCompilationInfo[m_byValInstructionIndex].byValInfo;
-
-    linkSlowCaseIfNotJSCell(iter, base); // base cell check
-    linkSlowCase(iter); // base array check
-    linkSlowCase(iter); // read barrier
-    linkSlowCase(iter); // vector length check
-    linkSlowCase(iter); // empty value
-
-    Label slowPath = label();
-
-    emitGetVirtualRegister(base, regT0);
-    emitGetVirtualRegister(property, regT1);
-    Call call = callOperation(operationHasIndexedPropertyDefault, dst, regT0, regT1, byValInfo);
-
-    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
-    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
-    m_byValInstructionIndex++;
-}
-
-void JIT::emit_op_get_direct_pname(Instruction* currentInstruction)
-{
-    int dst = currentInstruction[1].u.operand;
-    int base = currentInstruction[2].u.operand;
-    int index = currentInstruction[4].u.operand;
-    int enumerator = currentInstruction[5].u.operand;
-
-    // Check that base is a cell
-    emitGetVirtualRegister(base, regT0);
-    emitJumpSlowCaseIfNotJSCell(regT0, base);
-
-    // Check the structure
-    emitGetVirtualRegister(enumerator, regT2);
-    load32(Address(regT0, JSCell::structureIDOffset()), regT1);
-    addSlowCase(branch32(NotEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedStructureIDOffset())));
-
-    // Compute the offset
-    emitGetVirtualRegister(index, regT1);
-    // If index is less than the enumerator's cached inline storage, then it's an inline access
-    Jump outOfLineAccess = branch32(AboveOrEqual, regT1, Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()));
-    addPtr(TrustedImm32(JSObject::offsetOfInlineStorage()), regT0);
-    signExtend32ToPtr(regT1, regT1);
-    load64(BaseIndex(regT0, regT1, TimesEight), regT0);
-
-    Jump done = jump();
-
-    // Otherwise it's out of line
-    outOfLineAccess.link(this);
-    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
-    addSlowCase(branchIfNotToSpace(regT0));
-    sub32(Address(regT2, JSPropertyNameEnumerator::cachedInlineCapacityOffset()), regT1);
-    neg32(regT1);
-    signExtend32ToPtr(regT1, regT1);
-    int32_t offsetOfFirstProperty = static_cast<int32_t>(offsetInButterfly(firstOutOfLineOffset)) * sizeof(EncodedJSValue);
-    load64(BaseIndex(regT0, regT1, TimesEight, offsetOfFirstProperty), regT0);
-
-    done.link(this);
-    emitValueProfilingSite();
-    emitPutVirtualRegister(dst, regT0);
-}
-
-void JIT::emitSlow_op_get_direct_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
-    int base = currentInstruction[2].u.operand;
-    linkSlowCaseIfNotJSCell(iter, base);
-    linkSlowCase(iter);
-    linkSlowCase(iter);
-
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_direct_pname);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_enumerator_structure_pname(Instruction* currentInstruction)
-{
-    int dst = currentInstruction[1].u.operand;
-    int enumerator = currentInstruction[2].u.operand;
-    int index = currentInstruction[3].u.operand;
-
-    emitGetVirtualRegister(index, regT0);
-    emitGetVirtualRegister(enumerator, regT1);
-    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endStructurePropertyIndexOffset()));
-
-    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
-
-    Jump done = jump();
-    inBounds.link(this);
-
-    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
-    signExtend32ToPtr(regT0, regT0);
-    load64(BaseIndex(regT1, regT0, TimesEight), regT0);
-
-    done.link(this);
-    emitPutVirtualRegister(dst);
-}
-
-void JIT::emit_op_enumerator_generic_pname(Instruction* currentInstruction)
-{
-    int dst = currentInstruction[1].u.operand;
-    int enumerator = currentInstruction[2].u.operand;
-    int index = currentInstruction[3].u.operand;
-
-    emitGetVirtualRegister(index, regT0);
-    emitGetVirtualRegister(enumerator, regT1);
-    Jump inBounds = branch32(Below, regT0, Address(regT1, JSPropertyNameEnumerator::endGenericPropertyIndexOffset()));
-
-    move(TrustedImm64(JSValue::encode(jsNull())), regT0);
-
-    Jump done = jump();
-    inBounds.link(this);
-
-    loadPtr(Address(regT1, JSPropertyNameEnumerator::cachedPropertyNamesVectorOffset()), regT1);
-    signExtend32ToPtr(regT0, regT0);
-    load64(BaseIndex(regT1, regT0, TimesEight), regT0);
-
-    done.link(this);
-    emitPutVirtualRegister(dst);
-}
-
-void JIT::emit_op_profile_type(Instruction* currentInstruction)
-{
-    TypeLocation* cachedTypeLocation = currentInstruction[2].u.location;
-    int valueToProfile = currentInstruction[1].u.operand;
-
-    emitGetVirtualRegister(valueToProfile, regT0);
-
-    JumpList jumpToEnd;
-
-    jumpToEnd.append(branchTest64(Zero, regT0));
-
-    // Compile in a predictive type check, if possible, to see if we can skip writing to the log.
-    // These typechecks are inlined to match those of the 64-bit JSValue type checks.
-    if (cachedTypeLocation->m_lastSeenType == TypeUndefined)
-        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsUndefined()))));
-    else if (cachedTypeLocation->m_lastSeenType == TypeNull)
-        jumpToEnd.append(branch64(Equal, regT0, TrustedImm64(JSValue::encode(jsNull()))));
-    else if (cachedTypeLocation->m_lastSeenType == TypeBoolean) {
-        move(regT0, regT1);
-        and64(TrustedImm32(~1), regT1);
-        jumpToEnd.append(branch64(Equal, regT1, TrustedImm64(ValueFalse)));
-    } else if (cachedTypeLocation->m_lastSeenType == TypeMachineInt)
-        jumpToEnd.append(emitJumpIfInt(regT0));
-    else if (cachedTypeLocation->m_lastSeenType == TypeNumber)
-        jumpToEnd.append(emitJumpIfNumber(regT0));
-    else if (cachedTypeLocation->m_lastSeenType == TypeString) {
-        Jump isNotCell = emitJumpIfNotJSCell(regT0);
-        jumpToEnd.append(branch8(Equal, Address(regT0, JSCell::typeInfoTypeOffset()), TrustedImm32(StringType)));
-        isNotCell.link(this);
-    }
-
-    // Load the type profiling log into T2.
-    TypeProfilerLog* cachedTypeProfilerLog = m_vm->typeProfilerLog();
-    move(TrustedImmPtr(cachedTypeProfilerLog), regT2);
-    // Load the next log entry into T1.
-    loadPtr(Address(regT2, TypeProfilerLog::currentLogEntryOffset()), regT1);
-
-    // Store the JSValue onto the log entry.
-    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::valueOffset()));
-
-    // Store the structureID of the cell if T0 is a cell, otherwise, store 0 on the log entry.
-    Jump notCell = emitJumpIfNotJSCell(regT0);
-    load32(Address(regT0, JSCell::structureIDOffset()), regT0);
-    store32(regT0, Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
-    Jump skipIsCell = jump();
-    notCell.link(this);
-    store32(TrustedImm32(0), Address(regT1, TypeProfilerLog::LogEntry::structureIDOffset()));
-    skipIsCell.link(this);
-
-    // Store the typeLocation on the log entry.
-    move(TrustedImmPtr(cachedTypeLocation), regT0);
-    store64(regT0, Address(regT1, TypeProfilerLog::LogEntry::locationOffset()));
-
-    // Increment the current log entry.
-    addPtr(TrustedImm32(sizeof(TypeProfilerLog::LogEntry)), regT1);
-    store64(regT1, Address(regT2, TypeProfilerLog::currentLogEntryOffset()));
-    Jump skipClearLog = branchPtr(NotEqual, regT1, TrustedImmPtr(cachedTypeProfilerLog->logEndPtr()));
-    // Clear the log if we're at the end of the log.
-    callOperation(operationProcessTypeProfilerLog);
-    skipClearLog.link(this);
-
-    jumpToEnd.link(this);
-}
-
-#endif // USE(JSVALUE64)
-
-void JIT::emit_op_get_enumerable_length(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_enumerable_length);
-    slowPathCall.call();
-}
-
-void JIT::emitSlow_op_has_structure_property(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+void JIT::emitSlow_op_captured_mov(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
 {
+    VariableWatchpointSet* set = currentInstruction[3].u.watchpointSet;
+    if (!set || set->state() == IsInvalidated)
+        return;
     linkSlowCase(iter);
-    linkSlowCase(iter);
-
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_structure_property);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_has_generic_property(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_has_generic_property);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_get_property_enumerator(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_get_property_enumerator);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_to_index_string(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_to_index_string);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_profile_control_flow(Instruction* currentInstruction)
-{
-    BasicBlockLocation* basicBlockLocation = currentInstruction[1].u.basicBlockLocation;
-#if USE(JSVALUE64)
-    basicBlockLocation->emitExecuteCode(*this);
-#else
-    basicBlockLocation->emitExecuteCode(*this, regT0);
-#endif
-}
-
-void JIT::emit_op_create_direct_arguments(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_direct_arguments);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_create_scoped_arguments(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_scoped_arguments);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_create_out_of_band_arguments(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_create_out_of_band_arguments);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_copy_rest(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_copy_rest);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_get_rest_length(Instruction* currentInstruction)
-{
-    int dst = currentInstruction[1].u.operand;
-    unsigned numParamsToSkip = currentInstruction[2].u.unsignedValue;
-    load32(payloadFor(JSStack::ArgumentCount), regT0);
-    sub32(TrustedImm32(1), regT0);
-    Jump zeroLength = branch32(LessThanOrEqual, regT0, Imm32(numParamsToSkip));
-    sub32(Imm32(numParamsToSkip), regT0);
-#if USE(JSVALUE64)
-    boxInt32(regT0, JSValueRegs(regT0));
-#endif
-    Jump done = jump();
-
-    zeroLength.link(this);
-#if USE(JSVALUE64)
-    move(TrustedImm64(JSValue::encode(jsNumber(0))), regT0);
-#else
-    move(TrustedImm32(0), regT0);
-#endif
-
-    done.link(this);
-#if USE(JSVALUE64)
-    emitPutVirtualRegister(dst, regT0);
-#else
-    move(TrustedImm32(JSValue::Int32Tag), regT1);
-    emitPutVirtualRegister(dst, JSValueRegs(regT1, regT0));
-#endif
-}
-
-void JIT::emit_op_save(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_save);
-    slowPathCall.call();
-}
-
-void JIT::emit_op_resume(Instruction* currentInstruction)
-{
-    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_resume);
+    JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_captured_mov);
     slowPathCall.call();
 }
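Note (editor's addition, not part of the diff): both sides of the emit_op_instanceof hunk above compile the same loop, walking the value's prototype chain while optimistically assuming a hit. A minimal self-contained C++ sketch of that semantics follows; Cell and Structure here are hypothetical stand-ins for the real JSC classes, and a null pointer stands in for the non-cell (jsNull) chain terminator.

    // Hypothetical model types; the real JSC Cell/Structure are more involved.
    struct Cell;
    struct Structure {
        Cell* prototype; // load64(Address(regT2, Structure::prototypeOffset()))
    };
    struct Cell {
        Structure* structure; // loadPtr/emitLoadStructure in the emitted code
    };

    // Sketch of the emitted loop: regT1 holds proto, regT2 walks from value.
    bool instanceOfSketch(Cell* value, Cell* proto)
    {
        Cell* current = value;
        while (true) {
            Cell* next = current->structure->prototype;
            if (next == proto)
                return true;  // isInstance: branchPtr(Equal, regT2, regT1)
            if (!next)
                return false; // dropped off the chain without finding proto
            current = next;   // still a cell: emitJumpIfJSCell(...).linkTo(loop)
        }
    }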
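Note (editor's addition, not part of the diff): on the webkitgtk-2.4.11 side, emit_op_loop_hint emits an "add with saturation" when OSR entry in loops is disabled, so the execute counter can never wrap past INT32_MAX. A sketch of the same arithmetic, with addExecuteCounterSaturated and increment as illustrative names (increment stands in for Options::executionCounterIncrementForLoop()):

    #include <cstdint>
    #include <limits>

    // Bump the counter unless doing so could overflow; mirrors the emitted
    // branch32(GreaterThan, ...) guard followed by add32/store32.
    void addExecuteCounterSaturated(int32_t& counter, int32_t increment)
    {
        if (counter > std::numeric_limits<int32_t>::max() - increment)
            return; // dontAdd: already near the ceiling, skip the add
        counter += increment;
    }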
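Note (editor's addition, not part of the diff): the emit_op_is_boolean fast path works because of JSC's 64-bit JSValue encoding, in which false encodes as 0x06 (ValueFalse) and true as 0x07, so the two booleans differ only in the low bit. XORing with ValueFalse and then testing every bit except the lowest answers "is this a boolean" without branching on type. A sketch of that computation (isBooleanEncoded is an illustrative name):

    #include <cstdint>

    constexpr uint64_t ValueFalse = 0x06; // encoded false; true is 0x07

    bool isBooleanEncoded(uint64_t encodedValue)
    {
        // xor64(ValueFalse, value) maps {false, true} to {0, 1}; any other
        // encoding leaves bits set outside the low bit.
        return ((encodedValue ^ ValueFalse) & ~1ull) == 0;
    }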