author    Simon Hausmann <simon.hausmann@nokia.com>  2012-02-24 16:36:50 +0100
committer Simon Hausmann <simon.hausmann@nokia.com>  2012-02-24 16:36:50 +0100
commit    ad0d549d4cc13433f77c1ac8f0ab379c83d93f28 (patch)
tree      b34b0daceb7c8e7fdde4b4ec43650ab7caadb0a9 /Source/JavaScriptCore/jit
parent    03e12282df9aa1e1fb05a8b90f1cfc2e08764cec (diff)
download  qtwebkit-ad0d549d4cc13433f77c1ac8f0ab379c83d93f28.tar.gz
Imported WebKit commit bb52bf3c0119e8a128cd93afe5572413a8617de9 (http://svn.webkit.org/repository/webkit/trunk@108790)
Diffstat (limited to 'Source/JavaScriptCore/jit')
-rw-r--r--  Source/JavaScriptCore/jit/ExecutableAllocator.h               |  8
-rw-r--r--  Source/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp  |  2
-rw-r--r--  Source/JavaScriptCore/jit/HostCallReturnValue.cpp             | 40
-rw-r--r--  Source/JavaScriptCore/jit/HostCallReturnValue.h               | 67
-rw-r--r--  Source/JavaScriptCore/jit/JIT.cpp                             | 22
-rw-r--r--  Source/JavaScriptCore/jit/JIT.h                               |  6
-rw-r--r--  Source/JavaScriptCore/jit/JITCode.h                           | 14
-rw-r--r--  Source/JavaScriptCore/jit/JITDriver.h                         | 12
-rw-r--r--  Source/JavaScriptCore/jit/JITExceptions.cpp                   |  2
-rw-r--r--  Source/JavaScriptCore/jit/JITInlineMethods.h                  | 22
-rw-r--r--  Source/JavaScriptCore/jit/JITOpcodes.cpp                      |  8
-rw-r--r--  Source/JavaScriptCore/jit/JITOpcodes32_64.cpp                 | 16
-rw-r--r--  Source/JavaScriptCore/jit/JITPropertyAccess.cpp               | 12
-rw-r--r--  Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp          | 10
-rw-r--r--  Source/JavaScriptCore/jit/JITStubs.cpp                        | 88
-rw-r--r--  Source/JavaScriptCore/jit/JITStubs.h                          |  8
-rw-r--r--  Source/JavaScriptCore/jit/JSInterfaceJIT.h                    |  2
17 files changed, 247 insertions, 92 deletions
diff --git a/Source/JavaScriptCore/jit/ExecutableAllocator.h b/Source/JavaScriptCore/jit/ExecutableAllocator.h
index bc8b816c8..7520913d0 100644
--- a/Source/JavaScriptCore/jit/ExecutableAllocator.h
+++ b/Source/JavaScriptCore/jit/ExecutableAllocator.h
@@ -89,12 +89,12 @@ inline size_t roundUpAllocationSize(size_t request, size_t granularity)
}
-#if ENABLE(JIT) && ENABLE(ASSEMBLER)
-
namespace JSC {
typedef WTF::MetaAllocatorHandle ExecutableMemoryHandle;
+#if ENABLE(JIT) && ENABLE(ASSEMBLER)
+
class ExecutableAllocator {
enum ProtectionSetting { Writable, Executable };
@@ -235,8 +235,8 @@ private:
#endif
};
-} // namespace JSC
-
#endif // ENABLE(JIT) && ENABLE(ASSEMBLER)
+} // namespace JSC
+
#endif // !defined(ExecutableAllocator)
diff --git a/Source/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp b/Source/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
index 3fe631e3b..37a57e8b7 100644
--- a/Source/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
+++ b/Source/JavaScriptCore/jit/ExecutableAllocatorFixedVMPool.cpp
@@ -59,7 +59,7 @@ public:
: MetaAllocator(32) // round up all allocations to 32 bytes
{
m_reservation = PageReservation::reserveWithGuardPages(fixedPoolSize, OSAllocator::JSJITCodePages, EXECUTABLE_POOL_WRITABLE, true);
-#if !ENABLE(INTERPRETER)
+#if !ENABLE(CLASSIC_INTERPRETER)
if (!m_reservation)
CRASH();
#endif
diff --git a/Source/JavaScriptCore/jit/HostCallReturnValue.cpp b/Source/JavaScriptCore/jit/HostCallReturnValue.cpp
new file mode 100644
index 000000000..924bc7671
--- /dev/null
+++ b/Source/JavaScriptCore/jit/HostCallReturnValue.cpp
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "HostCallReturnValue.h"
+
+#include "CallFrame.h"
+#include "InlineASM.h"
+#include "JSObject.h"
+#include "JSValueInlineMethods.h"
+#include "ScopeChain.h"
+
+namespace JSC {
+
+// Nothing to see here.
+
+} // namespace JSC
+
diff --git a/Source/JavaScriptCore/jit/HostCallReturnValue.h b/Source/JavaScriptCore/jit/HostCallReturnValue.h
new file mode 100644
index 000000000..12fe10b10
--- /dev/null
+++ b/Source/JavaScriptCore/jit/HostCallReturnValue.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef HostCallReturnValue_h
+#define HostCallReturnValue_h
+
+#include "JSValue.h"
+#include "MacroAssemblerCodeRef.h"
+#include <wtf/Platform.h>
+
+// Unfortunately this only works on GCC-like compilers. And it's currently only used
+// by LLInt and DFG, which also are restricted to GCC-like compilers. We should
+// probably fix that at some point.
+#if COMPILER(GCC)
+
+#if CALLING_CONVENTION_IS_STDCALL
+#define HOST_CALL_RETURN_VALUE_OPTION CDECL
+#else
+#define HOST_CALL_RETURN_VALUE_OPTION
+#endif
+
+namespace JSC {
+
+extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValue();
+
+// This is a public declaration only to convince CLANG not to elide it.
+extern "C" EncodedJSValue HOST_CALL_RETURN_VALUE_OPTION getHostCallReturnValueWithExecState(ExecState*);
+
+inline void initializeHostCallReturnValue()
+{
+ getHostCallReturnValueWithExecState(0);
+}
+
+}
+
+#else // COMPILER(GCC)
+
+namespace JSC {
+inline void initializeHostCallReturnValue() { }
+}
+
+#endif // COMPILER(GCC)
+
+#endif // HostCallReturnValue_h
+
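The mechanism in HostCallReturnValue.h is a common keep-alive trick: the getters are declared extern "C" so hand-written assembly can reach them by their unmangled names, and initializeHostCallReturnValue() makes one ordinary call so the compiler and linker cannot discard the definition. A minimal standalone sketch of the same pattern, using invented names (keepMeForAsm, initializeKeepAlive) rather than anything from this commit:

#include <cstdint>

// Hypothetical illustration only: an unmangled entry point that assembly
// stubs could jump to by name.
extern "C" int64_t keepMeForAsm(void* callFrame)
{
    return callFrame ? 1 : 0;
}

// Calling it once from ordinary C++ keeps the symbol from being elided,
// which is the same job initializeHostCallReturnValue() does above.
inline void initializeKeepAlive()
{
    keepMeForAsm(0);
}
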
diff --git a/Source/JavaScriptCore/jit/JIT.cpp b/Source/JavaScriptCore/jit/JIT.cpp
index c8584a316..2adc596ce 100644
--- a/Source/JavaScriptCore/jit/JIT.cpp
+++ b/Source/JavaScriptCore/jit/JIT.cpp
@@ -219,7 +219,7 @@ void JIT::privateCompileMainPass()
m_labels[m_bytecodeOffset] = label();
#if ENABLE(JIT_VERBOSE)
- printf("Old JIT emitting code for bc#%u at offset 0x%lx.\n", m_bytecodeOffset, (long)debugOffset());
+ dataLog("Old JIT emitting code for bc#%u at offset 0x%lx.\n", m_bytecodeOffset, (long)debugOffset());
#endif
switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
@@ -325,6 +325,8 @@ void JIT::privateCompileMainPass()
DEFINE_OP(op_profile_will_call)
DEFINE_OP(op_push_new_scope)
DEFINE_OP(op_push_scope)
+ case op_put_by_id_transition_direct:
+ case op_put_by_id_transition_normal:
DEFINE_OP(op_put_by_id)
DEFINE_OP(op_put_by_index)
DEFINE_OP(op_put_by_val)
@@ -429,7 +431,7 @@ void JIT::privateCompileSlowCases()
#endif
#if ENABLE(JIT_VERBOSE)
- printf("Old JIT emitting slow code for bc#%u at offset 0x%lx.\n", m_bytecodeOffset, (long)debugOffset());
+ dataLog("Old JIT emitting slow code for bc#%u at offset 0x%lx.\n", m_bytecodeOffset, (long)debugOffset());
#endif
switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
@@ -486,6 +488,8 @@ void JIT::privateCompileSlowCases()
DEFINE_SLOWCASE_OP(op_post_inc)
DEFINE_SLOWCASE_OP(op_pre_dec)
DEFINE_SLOWCASE_OP(op_pre_inc)
+ case op_put_by_id_transition_direct:
+ case op_put_by_id_transition_normal:
DEFINE_SLOWCASE_OP(op_put_by_id)
DEFINE_SLOWCASE_OP(op_put_by_val)
DEFINE_SLOWCASE_OP(op_resolve_global)
@@ -525,6 +529,10 @@ void JIT::privateCompileSlowCases()
JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
{
+#if ENABLE(JIT_VERBOSE_OSR)
+ printf("Compiling JIT code!\n");
+#endif
+
#if ENABLE(VALUE_PROFILER)
m_canBeOptimized = m_codeBlock->canCompileWithDFG();
#endif
@@ -693,8 +701,12 @@ JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
}
-#if ENABLE(DFG_JIT)
- if (m_canBeOptimized) {
+#if ENABLE(DFG_JIT) || ENABLE(LLINT)
+ if (m_canBeOptimized
+#if ENABLE(LLINT)
+ || true
+#endif
+ ) {
CompactJITCodeMap::Encoder jitCodeMapEncoder;
for (unsigned bytecodeOffset = 0; bytecodeOffset < m_labels.size(); ++bytecodeOffset) {
if (m_labels[bytecodeOffset].isSet())
@@ -710,7 +722,7 @@ JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
CodeRef result = patchBuffer.finalizeCode();
#if ENABLE(JIT_VERBOSE)
- printf("JIT generated code for %p at [%p, %p).\n", m_codeBlock, result.executableMemory()->start(), result.executableMemory()->end());
+ dataLog("JIT generated code for %p at [%p, %p).\n", m_codeBlock, result.executableMemory()->start(), result.executableMemory()->end());
#endif
return JITCode(result, JITCode::BaselineJIT);
diff --git a/Source/JavaScriptCore/jit/JIT.h b/Source/JavaScriptCore/jit/JIT.h
index 8dd332893..a2bc4272a 100644
--- a/Source/JavaScriptCore/jit/JIT.h
+++ b/Source/JavaScriptCore/jit/JIT.h
@@ -335,7 +335,7 @@ namespace JSC {
void emitWriteBarrier(RegisterID owner, RegisterID valueTag, RegisterID scratch, RegisterID scratch2, WriteBarrierMode, WriteBarrierUseKind);
void emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode, WriteBarrierUseKind);
- template<typename ClassType, typename StructureType> void emitAllocateBasicJSObject(StructureType, RegisterID result, RegisterID storagePtr);
+ template<typename ClassType, bool destructor, typename StructureType> void emitAllocateBasicJSObject(StructureType, RegisterID result, RegisterID storagePtr);
template<typename T> void emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID storagePtr);
void emitAllocateJSFunction(FunctionExecutable*, RegisterID scopeChain, RegisterID result, RegisterID storagePtr);
@@ -500,7 +500,7 @@ namespace JSC {
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
- static const int patchOffsetGetByIdSlowCaseCall = 56;
+ static const int patchOffsetGetByIdSlowCaseCall = 64;
#endif
static const int patchOffsetOpCallCompareToJump = 32;
static const int patchOffsetMethodCheckProtoObj = 32;
@@ -518,7 +518,7 @@ namespace JSC {
#if ENABLE(OPCODE_SAMPLING)
#error "OPCODE_SAMPLING is not yet supported"
#else
- static const int patchOffsetGetByIdSlowCaseCall = 56;
+ static const int patchOffsetGetByIdSlowCaseCall = 64;
#endif
static const int patchOffsetOpCallCompareToJump = 32;
static const int patchOffsetMethodCheckProtoObj = 32;
diff --git a/Source/JavaScriptCore/jit/JITCode.h b/Source/JavaScriptCore/jit/JITCode.h
index f63c4a1a8..3ae5ff234 100644
--- a/Source/JavaScriptCore/jit/JITCode.h
+++ b/Source/JavaScriptCore/jit/JITCode.h
@@ -48,7 +48,7 @@ namespace JSC {
JITCode() { }
#endif
public:
- enum JITType { HostCallThunk, BaselineJIT, DFGJIT };
+ enum JITType { None, HostCallThunk, InterpreterThunk, BaselineJIT, DFGJIT };
static JITType bottomTierJIT()
{
@@ -66,8 +66,19 @@ namespace JSC {
return DFGJIT;
}
+ static bool isOptimizingJIT(JITType jitType)
+ {
+ return jitType == DFGJIT;
+ }
+
+ static bool isBaselineCode(JITType jitType)
+ {
+ return jitType == InterpreterThunk || jitType == BaselineJIT;
+ }
+
#if ENABLE(JIT)
JITCode()
+ : m_jitType(None)
{
}
@@ -75,6 +86,7 @@ namespace JSC {
: m_ref(ref)
, m_jitType(jitType)
{
+ ASSERT(jitType != None);
}
bool operator !() const
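The new None value and the isOptimizingJIT()/isBaselineCode() predicates let callers ask about a code block's tier without matching individual enum values. A hedged sketch of such a caller, with a hypothetical function name (shouldKeepProfiling) that is not from this commit:

#include "JITCode.h"

// Hypothetical helper: baseline tiers (interpreter thunk or baseline JIT)
// keep feeding profiling; optimized DFG code does not.
static bool shouldKeepProfiling(JSC::JITCode::JITType jitType)
{
    if (JSC::JITCode::isOptimizingJIT(jitType))
        return false;
    return JSC::JITCode::isBaselineCode(jitType);
}
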
diff --git a/Source/JavaScriptCore/jit/JITDriver.h b/Source/JavaScriptCore/jit/JITDriver.h
index 4b8df4751..b204c7737 100644
--- a/Source/JavaScriptCore/jit/JITDriver.h
+++ b/Source/JavaScriptCore/jit/JITDriver.h
@@ -33,15 +33,21 @@
#include "BytecodeGenerator.h"
#include "DFGDriver.h"
#include "JIT.h"
+#include "LLIntEntrypoints.h"
namespace JSC {
template<typename CodeBlockType>
inline bool jitCompileIfAppropriate(JSGlobalData& globalData, OwnPtr<CodeBlockType>& codeBlock, JITCode& jitCode, JITCode::JITType jitType)
{
+ if (jitType == codeBlock->getJITType())
+ return true;
+
if (!globalData.canUseJIT())
return true;
+ codeBlock->unlinkIncomingCalls();
+
bool dfgCompiled = false;
if (jitType == JITCode::DFGJIT)
dfgCompiled = DFG::tryCompile(globalData, codeBlock.get(), jitCode);
@@ -62,9 +68,14 @@ inline bool jitCompileIfAppropriate(JSGlobalData& globalData, OwnPtr<CodeBlockTy
inline bool jitCompileFunctionIfAppropriate(JSGlobalData& globalData, OwnPtr<FunctionCodeBlock>& codeBlock, JITCode& jitCode, MacroAssemblerCodePtr& jitCodeWithArityCheck, SharedSymbolTable*& symbolTable, JITCode::JITType jitType)
{
+ if (jitType == codeBlock->getJITType())
+ return true;
+
if (!globalData.canUseJIT())
return true;
+ codeBlock->unlinkIncomingCalls();
+
bool dfgCompiled = false;
if (jitType == JITCode::DFGJIT)
dfgCompiled = DFG::tryCompileFunction(globalData, codeBlock.get(), jitCode, jitCodeWithArityCheck);
@@ -79,7 +90,6 @@ inline bool jitCompileFunctionIfAppropriate(JSGlobalData& globalData, OwnPtr<Fun
}
jitCode = JIT::compile(&globalData, codeBlock.get(), &jitCodeWithArityCheck);
}
-
codeBlock->setJITCode(jitCode, jitCodeWithArityCheck);
return true;
diff --git a/Source/JavaScriptCore/jit/JITExceptions.cpp b/Source/JavaScriptCore/jit/JITExceptions.cpp
index 24baca41b..2edd3408f 100644
--- a/Source/JavaScriptCore/jit/JITExceptions.cpp
+++ b/Source/JavaScriptCore/jit/JITExceptions.cpp
@@ -64,7 +64,7 @@ ExceptionHandler genericThrow(JSGlobalData* globalData, ExecState* callFrame, JS
ExceptionHandler jitThrow(JSGlobalData* globalData, ExecState* callFrame, JSValue exceptionValue, ReturnAddressPtr faultLocation)
{
- return genericThrow(globalData, callFrame, exceptionValue, callFrame->codeBlock()->bytecodeOffset(faultLocation));
+ return genericThrow(globalData, callFrame, exceptionValue, callFrame->codeBlock()->bytecodeOffset(callFrame, faultLocation));
}
}
diff --git a/Source/JavaScriptCore/jit/JITInlineMethods.h b/Source/JavaScriptCore/jit/JITInlineMethods.h
index dfcfbd499..e0310569d 100644
--- a/Source/JavaScriptCore/jit/JITInlineMethods.h
+++ b/Source/JavaScriptCore/jit/JITInlineMethods.h
@@ -265,8 +265,13 @@ ALWAYS_INLINE void JIT::restoreArgumentReference()
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
- if (m_bytecodeOffset)
+ if (m_bytecodeOffset) {
+#if USE(JSVALUE32_64)
+ storePtr(TrustedImmPtr(m_codeBlock->instructions().begin() + m_bytecodeOffset + 1), intTagFor(RegisterFile::ArgumentCount));
+#else
store32(Imm32(m_bytecodeOffset + 1), intTagFor(RegisterFile::ArgumentCount));
+#endif
+ }
storePtr(callFrameRegister, &m_globalData->topCallFrame);
}
@@ -402,9 +407,13 @@ ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}
-template <typename ClassType, typename StructureType> inline void JIT::emitAllocateBasicJSObject(StructureType structure, RegisterID result, RegisterID storagePtr)
+template <typename ClassType, bool destructor, typename StructureType> inline void JIT::emitAllocateBasicJSObject(StructureType structure, RegisterID result, RegisterID storagePtr)
{
- MarkedAllocator* allocator = &m_globalData->heap.allocatorForObject(sizeof(ClassType));
+ MarkedAllocator* allocator = 0;
+ if (destructor)
+ allocator = &m_globalData->heap.allocatorForObjectWithDestructor(sizeof(ClassType));
+ else
+ allocator = &m_globalData->heap.allocatorForObjectWithoutDestructor(sizeof(ClassType));
loadPtr(&allocator->m_firstFreeCell, result);
addSlowCase(branchTestPtr(Zero, result));
@@ -428,12 +437,12 @@ template <typename ClassType, typename StructureType> inline void JIT::emitAlloc
template <typename T> inline void JIT::emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID scratch)
{
- emitAllocateBasicJSObject<JSFinalObject>(structure, result, scratch);
+ emitAllocateBasicJSObject<JSFinalObject, false, T>(structure, result, scratch);
}
inline void JIT::emitAllocateJSFunction(FunctionExecutable* executable, RegisterID scopeChain, RegisterID result, RegisterID storagePtr)
{
- emitAllocateBasicJSObject<JSFunction>(TrustedImmPtr(m_codeBlock->globalObject()->namedFunctionStructure()), result, storagePtr);
+ emitAllocateBasicJSObject<JSFunction, true>(TrustedImmPtr(m_codeBlock->globalObject()->namedFunctionStructure()), result, storagePtr);
// store the function's scope chain
storePtr(scopeChain, Address(result, JSFunction::offsetOfScopeChain()));
@@ -676,6 +685,9 @@ inline void JIT::map(unsigned bytecodeOffset, int virtualRegisterIndex, Register
m_mappedVirtualRegisterIndex = virtualRegisterIndex;
m_mappedTag = tag;
m_mappedPayload = payload;
+
+ ASSERT(!canBeOptimized() || m_mappedPayload == regT0);
+ ASSERT(!canBeOptimized() || m_mappedTag == regT1);
}
inline void JIT::unmap(RegisterID registerID)
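The extra bool template parameter on emitAllocateBasicJSObject resolves the allocator choice at compile time, so each instantiation (JSFinalObject without a destructor, JSFunction with one) touches exactly one allocator and the branch costs nothing at run time. The same idiom in isolation, as a sketch with invented names (Pool, poolFor):

// Hypothetical illustration of a compile-time bool selecting between pools.
struct Pool { const char* name; };

static Pool poolWithDestructor = { "destructor" };
static Pool poolWithoutDestructor = { "plain" };

// The bool is a compile-time constant, so the compiler folds the branch
// away in each instantiation, mirroring the template shape used above.
template <typename ClassType, bool needsDestructor>
Pool* poolFor()
{
    if (needsDestructor)
        return &poolWithDestructor;
    return &poolWithoutDestructor;
}

// Usage: poolFor<SomePlainClass, false>() vs. poolFor<SomeDestructibleClass, true>().
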
diff --git a/Source/JavaScriptCore/jit/JITOpcodes.cpp b/Source/JavaScriptCore/jit/JITOpcodes.cpp
index 8a2077e47..bc53d2cd8 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes.cpp
@@ -80,7 +80,8 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
// Also initialize ReturnPC for use by lazy linking and exceptions.
preserveReturnAddressAfterCall(regT3);
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
-
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callLazyLinkCall = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -99,7 +100,8 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
// Also initialize ReturnPC for use by lazy linking and exeptions.
preserveReturnAddressAfterCall(regT3);
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
-
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callLazyLinkConstruct = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -118,6 +120,7 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
preserveReturnAddressAfterCall(regT3);
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callCompileCall = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -140,6 +143,7 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
preserveReturnAddressAfterCall(regT3);
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callCompileConstruct = call();
restoreReturnAddressBeforeReturn(regT3);
diff --git a/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp b/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
index 99594c3f1..1a09302cf 100644
--- a/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITOpcodes32_64.cpp
@@ -79,7 +79,8 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
// Also initialize ReturnPC for use by lazy linking and exceptions.
preserveReturnAddressAfterCall(regT3);
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
-
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callLazyLinkCall = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -98,7 +99,8 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
// Also initialize ReturnPC for use by lazy linking and exeptions.
preserveReturnAddressAfterCall(regT3);
emitPutToCallFrameHeader(regT3, RegisterFile::ReturnPC);
-
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callLazyLinkConstruct = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -117,6 +119,8 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
Jump hasCodeBlock1 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForCall)), TrustedImm32(0));
preserveReturnAddressAfterCall(regT3);
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callCompileCall = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -139,6 +143,8 @@ PassRefPtr<ExecutableMemoryHandle> JIT::privateCompileCTIMachineTrampolines(JSGl
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
Jump hasCodeBlock2 = branch32(GreaterThanOrEqual, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParametersForConstruct)), TrustedImm32(0));
preserveReturnAddressAfterCall(regT3);
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
restoreArgumentReference();
Call callCompileConstruct = call();
restoreReturnAddressBeforeReturn(regT3);
@@ -348,7 +354,8 @@ JIT::Label JIT::privateCompileCTINativeCall(JSGlobalData* globalData, bool isCon
move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
storePtr(regT1, regT2);
poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
-
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
// Set the return address.
move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
restoreReturnAddressBeforeReturn(regT1);
@@ -484,7 +491,8 @@ JIT::CodeRef JIT::privateCompileCTINativeCall(JSGlobalData* globalData, NativeFu
move(TrustedImmPtr(&globalData->exceptionLocation), regT2);
storePtr(regT1, regT2);
poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
-
+
+ storePtr(callFrameRegister, &m_globalData->topCallFrame);
// Set the return address.
move(TrustedImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT1);
restoreReturnAddressBeforeReturn(regT1);
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
index 9fa29e2d9..99c038e55 100644
--- a/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
+++ b/Source/JavaScriptCore/jit/JITPropertyAccess.cpp
@@ -526,8 +526,16 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
}
}
- Call callTarget;
-
+ // If we succeed in all of our checks, and the code was optimizable, then make sure we
+ // decrement the rare case counter.
+#if ENABLE(VALUE_PROFILER)
+ if (m_codeBlock->canCompileWithDFG()) {
+ sub32(
+ TrustedImm32(1),
+ AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
+ }
+#endif
+
// emit a call only if storage realloc is needed
bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
if (willNeedStorageRealloc) {
diff --git a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
index 2c81a5ff6..1ee2915dc 100644
--- a/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
+++ b/Source/JavaScriptCore/jit/JITPropertyAccess32_64.cpp
@@ -493,6 +493,16 @@ void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure
testPrototype((*it)->storedPrototype(), failureCases);
}
+ // If we succeed in all of our checks, and the code was optimizable, then make sure we
+ // decrement the rare case counter.
+#if ENABLE(VALUE_PROFILER)
+ if (m_codeBlock->canCompileWithDFG()) {
+ sub32(
+ TrustedImm32(1),
+ AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
+ }
+#endif
+
// Reallocate property storage if needed.
Call callTarget;
bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
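Both put_by_id transition stubs now decrement the rare-case profile counter on their fast path: once the transition has been compiled inline, hitting it should no longer count as evidence that the bytecode is slow-path-heavy, which would otherwise skew the DFG's profiling. A rough sketch of the kind of counter being adjusted, with an invented threshold check (looksSlowPathHeavy) that is not the engine's real heuristic:

#include <cstdint>

struct RareCaseProfile {
    int32_t m_counter; // bumped on slow-path entry, decremented by patched fast paths
};

// Hypothetical consumer: a tier-up decision that treats a high rare-case
// count as a reason to be cautious about optimizing this bytecode.
static bool looksSlowPathHeavy(const RareCaseProfile& profile, int32_t threshold)
{
    return profile.m_counter >= threshold;
}
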
diff --git a/Source/JavaScriptCore/jit/JITStubs.cpp b/Source/JavaScriptCore/jit/JITStubs.cpp
index 386d0dfa1..a0a816505 100644
--- a/Source/JavaScriptCore/jit/JITStubs.cpp
+++ b/Source/JavaScriptCore/jit/JITStubs.cpp
@@ -1446,6 +1446,7 @@ DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
JSValue baseValue = stackFrame.args[0].jsValue();
ASSERT(baseValue.isObject());
+
asObject(baseValue)->putDirect(callFrame->globalData(), ident, stackFrame.args[2].jsValue(), slot);
CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
@@ -1931,16 +1932,16 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
unsigned bytecodeIndex = stackFrame.args[0].int32();
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Entered optimize_from_loop with executeCounter = %d, reoptimizationRetryCounter = %u, optimizationDelayCounter = %u\n", codeBlock->jitExecuteCounter(), codeBlock->reoptimizationRetryCounter(), codeBlock->optimizationDelayCounter());
+ dataLog("%p: Entered optimize_from_loop with executeCounter = %d, reoptimizationRetryCounter = %u, optimizationDelayCounter = %u\n", codeBlock, codeBlock->jitExecuteCounter(), codeBlock->reoptimizationRetryCounter(), codeBlock->optimizationDelayCounter());
#endif
if (codeBlock->hasOptimizedReplacement()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Considering loop OSR into %p(%p) with success/fail %u/%u.\n", codeBlock, codeBlock->replacement(), codeBlock->replacement()->speculativeSuccessCounter(), codeBlock->replacement()->speculativeFailCounter());
+ dataLog("Considering loop OSR into %p(%p) with success/fail %u/%u.\n", codeBlock, codeBlock->replacement(), codeBlock->replacement()->speculativeSuccessCounter(), codeBlock->replacement()->speculativeFailCounter());
#endif
if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Triggering reoptimization of %p(%p) (in loop).\n", codeBlock, codeBlock->replacement());
+ dataLog("Triggering reoptimization of %p(%p) (in loop).\n", codeBlock, codeBlock->replacement());
#endif
codeBlock->reoptimize();
return;
@@ -1948,7 +1949,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
} else {
if (!codeBlock->shouldOptimizeNow()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Delaying optimization for %p (in loop) because of insufficient profiling.\n", codeBlock);
+ dataLog("Delaying optimization for %p (in loop) because of insufficient profiling.\n", codeBlock);
#endif
return;
}
@@ -1958,14 +1959,14 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
JSObject* error = codeBlock->compileOptimized(callFrame, scopeChain);
#if ENABLE(JIT_VERBOSE_OSR)
if (error)
- fprintf(stderr, "WARNING: optimized compilation from loop failed.\n");
+ dataLog("WARNING: optimized compilation from loop failed.\n");
#else
UNUSED_PARAM(error);
#endif
if (codeBlock->replacement() == codeBlock) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Optimizing %p from loop failed.\n", codeBlock);
+ dataLog("Optimizing %p from loop failed.\n", codeBlock);
#endif
ASSERT(codeBlock->getJITType() == JITCode::BaselineJIT);
@@ -1979,7 +1980,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
if (void* address = DFG::prepareOSREntry(callFrame, optimizedCodeBlock, bytecodeIndex)) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Optimizing %p from loop succeeded, performing OSR after a delay of %u.\n", codeBlock, codeBlock->optimizationDelayCounter());
+ dataLog("Optimizing %p from loop succeeded, performing OSR after a delay of %u.\n", codeBlock, codeBlock->optimizationDelayCounter());
#endif
codeBlock->optimizeSoon();
@@ -1989,7 +1990,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
}
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Optimizing %p from loop succeeded, OSR failed, after a delay of %u.\n", codeBlock, codeBlock->optimizationDelayCounter());
+ dataLog("Optimizing %p from loop succeeded, OSR failed, after a delay of %u.\n", codeBlock, codeBlock->optimizationDelayCounter());
#endif
// Count the OSR failure as a speculation failure. If this happens a lot, then
@@ -1997,7 +1998,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
optimizedCodeBlock->countSpeculationFailure();
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Encountered loop OSR failure into %p(%p) with success/fail %u/%u.\n", codeBlock, codeBlock->replacement(), codeBlock->replacement()->speculativeSuccessCounter(), codeBlock->replacement()->speculativeFailCounter());
+ dataLog("Encountered loop OSR failure into %p(%p) with success/fail %u/%u.\n", codeBlock, codeBlock->replacement(), codeBlock->replacement()->speculativeSuccessCounter(), codeBlock->replacement()->speculativeFailCounter());
#endif
// We are a lot more conservative about triggering reoptimization after OSR failure than
@@ -2010,7 +2011,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_loop)
// reoptimization trigger.
if (optimizedCodeBlock->shouldReoptimizeNow()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Triggering reoptimization of %p(%p) (in loop after OSR fail).\n", codeBlock, codeBlock->replacement());
+ dataLog("Triggering reoptimization of %p(%p) (in loop after OSR fail).\n", codeBlock, codeBlock->replacement());
#endif
codeBlock->reoptimize();
return;
@@ -2029,20 +2030,20 @@ DEFINE_STUB_FUNCTION(void, optimize_from_ret)
CodeBlock* codeBlock = callFrame->codeBlock();
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Entered optimize_from_ret with executeCounter = %d, reoptimizationRetryCounter = %u, optimizationDelayCounter = %u\n", codeBlock->jitExecuteCounter(), codeBlock->reoptimizationRetryCounter(), codeBlock->optimizationDelayCounter());
+ dataLog("Entered optimize_from_ret with executeCounter = %d, reoptimizationRetryCounter = %u, optimizationDelayCounter = %u\n", codeBlock->jitExecuteCounter(), codeBlock->reoptimizationRetryCounter(), codeBlock->optimizationDelayCounter());
#endif
if (codeBlock->hasOptimizedReplacement()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Returning from old JIT call frame with optimized replacement %p(%p), with success/fail %u/%u", codeBlock, codeBlock->replacement(), codeBlock->replacement()->speculativeSuccessCounter(), codeBlock->replacement()->speculativeFailCounter());
+ dataLog("Returning from old JIT call frame with optimized replacement %p(%p), with success/fail %u/%u", codeBlock, codeBlock->replacement(), codeBlock->replacement()->speculativeSuccessCounter(), codeBlock->replacement()->speculativeFailCounter());
CallFrame* callerFrame = callFrame->callerFrame();
if (callerFrame)
- printf(", callerFrame = %p, returnPC = %p, caller code block = %p", callerFrame, callFrame->returnPC().value(), callerFrame->codeBlock());
- printf("\n");
+ dataLog(", callerFrame = %p, returnPC = %p, caller code block = %p", callerFrame, callFrame->returnPC().value(), callerFrame->codeBlock());
+ dataLog("\n");
#endif
if (codeBlock->replacement()->shouldReoptimizeNow()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Triggering reoptimization of %p(%p) (in return).\n", codeBlock, codeBlock->replacement());
+ dataLog("Triggering reoptimization of %p(%p) (in return).\n", codeBlock, codeBlock->replacement());
#endif
codeBlock->reoptimize();
}
@@ -2053,7 +2054,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_ret)
if (!codeBlock->shouldOptimizeNow()) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Delaying optimization for %p (in return) because of insufficient profiling.\n", codeBlock);
+ dataLog("Delaying optimization for %p (in return) because of insufficient profiling.\n", codeBlock);
#endif
return;
}
@@ -2062,11 +2063,11 @@ DEFINE_STUB_FUNCTION(void, optimize_from_ret)
JSObject* error = codeBlock->compileOptimized(callFrame, scopeChain);
if (error)
- fprintf(stderr, "WARNING: optimized compilation from ret failed.\n");
+ dataLog("WARNING: optimized compilation from ret failed.\n");
if (codeBlock->replacement() == codeBlock) {
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Optimizing %p from return failed.\n", codeBlock);
+ dataLog("Optimizing %p from return failed.\n", codeBlock);
#endif
ASSERT(codeBlock->getJITType() == JITCode::BaselineJIT);
@@ -2077,7 +2078,7 @@ DEFINE_STUB_FUNCTION(void, optimize_from_ret)
ASSERT(codeBlock->replacement()->getJITType() == JITCode::DFGJIT);
#if ENABLE(JIT_VERBOSE_OSR)
- printf("Optimizing %p from return succeeded after a delay of %u.\n", codeBlock, codeBlock->optimizationDelayCounter());
+ dataLog("Optimizing %p from return succeeded after a delay of %u.\n", codeBlock, codeBlock->optimizationDelayCounter());
#endif
codeBlock->optimizeSoon();
@@ -2186,45 +2187,13 @@ DEFINE_STUB_FUNCTION(void*, op_construct_jitCompile)
return result;
}
-inline CallFrame* arityCheckFor(CallFrame* callFrame, RegisterFile* registerFile, CodeSpecializationKind kind)
-{
- JSFunction* callee = asFunction(callFrame->callee());
- ASSERT(!callee->isHostFunction());
- CodeBlock* newCodeBlock = &callee->jsExecutable()->generatedBytecodeFor(kind);
- int argumentCountIncludingThis = callFrame->argumentCountIncludingThis();
-
- // This ensures enough space for the worst case scenario of zero arguments passed by the caller.
- if (!registerFile->grow(callFrame->registers() + newCodeBlock->numParameters() + newCodeBlock->m_numCalleeRegisters))
- return 0;
-
- ASSERT(argumentCountIncludingThis < newCodeBlock->numParameters());
-
- // Too few arguments -- copy call frame and arguments, then fill in missing arguments with undefined.
- size_t delta = newCodeBlock->numParameters() - argumentCountIncludingThis;
- Register* src = callFrame->registers();
- Register* dst = callFrame->registers() + delta;
-
- int i;
- int end = -CallFrame::offsetFor(argumentCountIncludingThis);
- for (i = -1; i >= end; --i)
- dst[i] = src[i];
-
- end -= delta;
- for ( ; i >= end; --i)
- dst[i] = jsUndefined();
-
- CallFrame* newCallFrame = CallFrame::create(dst);
- ASSERT((void*)newCallFrame <= registerFile->end());
- return newCallFrame;
-}
-
DEFINE_STUB_FUNCTION(void*, op_call_arityCheck)
{
STUB_INIT_STACK_FRAME(stackFrame);
CallFrame* callFrame = stackFrame.callFrame;
- CallFrame* newCallFrame = arityCheckFor(callFrame, stackFrame.registerFile, CodeForCall);
+ CallFrame* newCallFrame = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.registerFile, CodeForCall);
if (!newCallFrame)
return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createStackOverflowError(callFrame->callerFrame()));
@@ -2237,7 +2206,7 @@ DEFINE_STUB_FUNCTION(void*, op_construct_arityCheck)
CallFrame* callFrame = stackFrame.callFrame;
- CallFrame* newCallFrame = arityCheckFor(callFrame, stackFrame.registerFile, CodeForConstruct);
+ CallFrame* newCallFrame = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.registerFile, CodeForConstruct);
if (!newCallFrame)
return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createStackOverflowError(callFrame->callerFrame()));
@@ -2314,6 +2283,7 @@ DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_NotJSFunction)
STUB_INIT_STACK_FRAME(stackFrame);
CallFrame* callFrame = stackFrame.callFrame;
+
JSValue callee = callFrame->calleeAsValue();
CallData callData;
@@ -3600,15 +3570,15 @@ MacroAssemblerCodeRef JITThunks::ctiStub(JSGlobalData* globalData, ThunkGenerato
NativeExecutable* JITThunks::hostFunctionStub(JSGlobalData* globalData, NativeFunction function, NativeFunction constructor)
{
- std::pair<HostFunctionStubMap::iterator, bool> entry = m_hostFunctionStubMap->add(function, Weak<NativeExecutable>());
- if (!*entry.first->second)
- entry.first->second.set(*globalData, NativeExecutable::create(*globalData, JIT::compileCTINativeCall(globalData, function), function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct()), constructor, NoIntrinsic));
- return entry.first->second.get();
+ std::pair<HostFunctionStubMap::iterator, bool> result = m_hostFunctionStubMap->add(function, PassWeak<NativeExecutable>());
+ if (!result.first->second)
+ result.first->second = PassWeak<NativeExecutable>(*globalData, NativeExecutable::create(*globalData, JIT::compileCTINativeCall(globalData, function), function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct()), constructor, NoIntrinsic));
+ return result.first->second.get();
}
NativeExecutable* JITThunks::hostFunctionStub(JSGlobalData* globalData, NativeFunction function, ThunkGenerator generator, Intrinsic intrinsic)
{
- std::pair<HostFunctionStubMap::iterator, bool> entry = m_hostFunctionStubMap->add(function, Weak<NativeExecutable>());
+ std::pair<HostFunctionStubMap::iterator, bool> entry = m_hostFunctionStubMap->add(function, PassWeak<NativeExecutable>());
if (!*entry.first->second) {
MacroAssemblerCodeRef code;
if (generator) {
@@ -3618,7 +3588,7 @@ NativeExecutable* JITThunks::hostFunctionStub(JSGlobalData* globalData, NativeFu
code = MacroAssemblerCodeRef();
} else
code = JIT::compileCTINativeCall(globalData, function);
- entry.first->second.set(*globalData, NativeExecutable::create(*globalData, code, function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct()), callHostFunctionAsConstructor, intrinsic));
+ entry.first->second = PassWeak<NativeExecutable>(*globalData, NativeExecutable::create(*globalData, code, function, MacroAssemblerCodeRef::createSelfManagedCodeRef(ctiNativeConstruct()), callHostFunctionAsConstructor, intrinsic));
}
return entry.first->second.get();
}
diff --git a/Source/JavaScriptCore/jit/JITStubs.h b/Source/JavaScriptCore/jit/JITStubs.h
index fe5f522e9..890d99747 100644
--- a/Source/JavaScriptCore/jit/JITStubs.h
+++ b/Source/JavaScriptCore/jit/JITStubs.h
@@ -37,8 +37,6 @@
#include "ThunkGenerators.h"
#include <wtf/HashMap.h>
-#if ENABLE(JIT)
-
namespace JSC {
struct StructureStubInfo;
@@ -263,6 +261,8 @@ namespace JSC {
#define JITSTACKFRAME_ARGS_INDEX (OBJECT_OFFSETOF(JITStackFrame, args) / sizeof(void*))
+#if ENABLE(JIT)
+
#define STUB_ARGS_DECLARATION void** args
#define STUB_ARGS (args)
@@ -456,8 +456,8 @@ extern "C" {
void* JIT_STUB cti_vm_throw(STUB_ARGS_DECLARATION);
} // extern "C"
-} // namespace JSC
-
#endif // ENABLE(JIT)
+} // namespace JSC
+
#endif // JITStubs_h
diff --git a/Source/JavaScriptCore/jit/JSInterfaceJIT.h b/Source/JavaScriptCore/jit/JSInterfaceJIT.h
index d54dedc1a..05d1ce5ad 100644
--- a/Source/JavaScriptCore/jit/JSInterfaceJIT.h
+++ b/Source/JavaScriptCore/jit/JSInterfaceJIT.h
@@ -26,8 +26,10 @@
#ifndef JSInterfaceJIT_h
#define JSInterfaceJIT_h
+#include "BytecodeConventions.h"
#include "JITCode.h"
#include "JITStubs.h"
+#include "JSString.h"
#include "JSValue.h"
#include "MacroAssembler.h"
#include "RegisterFile.h"