author    Simon Hausmann <simon.hausmann@nokia.com>  2012-02-24 16:36:50 +0100
committer Simon Hausmann <simon.hausmann@nokia.com>  2012-02-24 16:36:50 +0100
commit    ad0d549d4cc13433f77c1ac8f0ab379c83d93f28 (patch)
tree      b34b0daceb7c8e7fdde4b4ec43650ab7caadb0a9 /Source/JavaScriptCore/llint
parent    03e12282df9aa1e1fb05a8b90f1cfc2e08764cec (diff)
Imported WebKit commit bb52bf3c0119e8a128cd93afe5572413a8617de9 (http://svn.webkit.org/repository/webkit/trunk@108790)
Diffstat (limited to 'Source/JavaScriptCore/llint')
-rw-r--r--  Source/JavaScriptCore/llint/LLIntCommon.h             |   49
-rw-r--r--  Source/JavaScriptCore/llint/LLIntData.cpp             |  116
-rw-r--r--  Source/JavaScriptCore/llint/LLIntData.h               |   93
-rw-r--r--  Source/JavaScriptCore/llint/LLIntEntrypoints.cpp      |   86
-rw-r--r--  Source/JavaScriptCore/llint/LLIntEntrypoints.h        |   64
-rw-r--r--  Source/JavaScriptCore/llint/LLIntExceptions.cpp       |   80
-rw-r--r--  Source/JavaScriptCore/llint/LLIntExceptions.h         |   66
-rw-r--r--  Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h   |   90
-rw-r--r--  Source/JavaScriptCore/llint/LLIntOffsetsExtractor.cpp |   84
-rw-r--r--  Source/JavaScriptCore/llint/LLIntSlowPaths.cpp        | 1558
-rw-r--r--  Source/JavaScriptCore/llint/LLIntSlowPaths.h          |  171
-rw-r--r--  Source/JavaScriptCore/llint/LLIntThunks.cpp           |   81
-rw-r--r--  Source/JavaScriptCore/llint/LLIntThunks.h             |   52
-rw-r--r--  Source/JavaScriptCore/llint/LowLevelInterpreter.asm   | 2390
-rw-r--r--  Source/JavaScriptCore/llint/LowLevelInterpreter.cpp   |   38
-rw-r--r--  Source/JavaScriptCore/llint/LowLevelInterpreter.h     |   53
16 files changed, 5071 insertions, 0 deletions
diff --git a/Source/JavaScriptCore/llint/LLIntCommon.h b/Source/JavaScriptCore/llint/LLIntCommon.h
new file mode 100644
index 000000000..6b908eae2
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntCommon.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntCommon_h
+#define LLIntCommon_h
+
+// Print every instruction executed.
+#define LLINT_EXECUTION_TRACING 0
+
+// Print some information for some of the more subtle slow paths.
+#define LLINT_SLOW_PATH_TRACING 0
+
+// Disable inline allocation in the interpreter. This is great if you're changing
+// how the GC allocates.
+#define LLINT_ALWAYS_ALLOCATE_SLOW 0
+
+// Enable OSR into the JIT. Disabling this while the LLInt is enabled effectively
+// turns off all JIT'ing, since in LLInt's parlance, OSR subsumes any form of JIT
+// invocation.
+#if ENABLE(JIT)
+#define LLINT_OSR_TO_JIT 1
+#else
+#define LLINT_OSR_TO_JIT 0
+#endif
+
+#endif // LLIntCommon_h
+
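LLIntCommon.h boils every LLInt build option down to a plain 0/1 macro, so both the C++ sources and the offline assembler config (see LLIntOfflineAsmConfig.h below) can test them with unambiguous #if checks. A minimal standalone sketch of how such a flag gates per-instruction logging; the consumer below is hypothetical, not the actual interpreter loop, which lives in LowLevelInterpreter.asm:

// A minimal sketch, assuming a hypothetical consumer of the tracing flag.
#include <cstdio>

#define LLINT_EXECUTION_TRACING 1 // set to 0 and the tracing compiles away

static void traceInstruction(unsigned bytecodeOffset, const char* opcodeName)
{
#if LLINT_EXECUTION_TRACING
    std::printf("bc#%u: %s\n", bytecodeOffset, opcodeName);
#else
    (void)bytecodeOffset;
    (void)opcodeName;
#endif
}

int main()
{
    traceInstruction(0, "op_enter");
    traceInstruction(1, "op_ret");
    return 0;
}

Defining each flag as 0 or 1, rather than leaving it undefined, keeps the intent explicit: an #if on an undefined macro silently evaluates to 0, which is easy to misread.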
diff --git a/Source/JavaScriptCore/llint/LLIntData.cpp b/Source/JavaScriptCore/llint/LLIntData.cpp
new file mode 100644
index 000000000..c0fe78142
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntData.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "LLIntData.h"
+
+#if ENABLE(LLINT)
+
+#include "BytecodeConventions.h"
+#include "CodeType.h"
+#include "Instruction.h"
+#include "LowLevelInterpreter.h"
+#include "Opcode.h"
+
+namespace JSC { namespace LLInt {
+
+Data::Data()
+ : m_exceptionInstructions(new Instruction[maxOpcodeLength + 1])
+ , m_opcodeMap(new Opcode[numOpcodeIDs])
+{
+ for (int i = 0; i < maxOpcodeLength + 1; ++i)
+ m_exceptionInstructions[i].u.pointer = bitwise_cast<void*>(&llint_throw_from_slow_path_trampoline);
+#define OPCODE_ENTRY(opcode, length) m_opcodeMap[opcode] = bitwise_cast<void*>(&llint_##opcode);
+ FOR_EACH_OPCODE_ID(OPCODE_ENTRY);
+#undef OPCODE_ENTRY
+}
+
+#if COMPILER(CLANG)
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wmissing-noreturn"
+#endif
+void Data::performAssertions(JSGlobalData& globalData)
+{
+ UNUSED_PARAM(globalData);
+
+ // Assertions to match LowLevelInterpreter.asm. If you change any of this code, be
+ // prepared to change LowLevelInterpreter.asm as well!!
+ ASSERT(RegisterFile::CallFrameHeaderSize * 8 == 48);
+ ASSERT(RegisterFile::ArgumentCount * 8 == -48);
+ ASSERT(RegisterFile::CallerFrame * 8 == -40);
+ ASSERT(RegisterFile::Callee * 8 == -32);
+ ASSERT(RegisterFile::ScopeChain * 8 == -24);
+ ASSERT(RegisterFile::ReturnPC * 8 == -16);
+ ASSERT(RegisterFile::CodeBlock * 8 == -8);
+ ASSERT(CallFrame::argumentOffsetIncludingThis(0) == -RegisterFile::CallFrameHeaderSize - 1);
+#if CPU(BIG_ENDIAN)
+ ASSERT(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) == 0);
+ ASSERT(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) == 4);
+#else
+ ASSERT(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) == 4);
+ ASSERT(OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) == 0);
+#endif
+ ASSERT(JSValue::Int32Tag == -1);
+ ASSERT(JSValue::BooleanTag == -2);
+ ASSERT(JSValue::NullTag == -3);
+ ASSERT(JSValue::UndefinedTag == -4);
+ ASSERT(JSValue::CellTag == -5);
+ ASSERT(JSValue::EmptyValueTag == -6);
+ ASSERT(JSValue::DeletedValueTag == -7);
+ ASSERT(JSValue::LowestTag == -7);
+ ASSERT(StringType == 5);
+ ASSERT(ObjectType == 13);
+ ASSERT(MasqueradesAsUndefined == 1);
+ ASSERT(ImplementsHasInstance == 2);
+ ASSERT(ImplementsDefaultHasInstance == 8);
+ ASSERT(&globalData.heap.allocatorForObjectWithoutDestructor(sizeof(JSFinalObject)) - &globalData.heap.firstAllocatorWithoutDestructors() == 3);
+ ASSERT(FirstConstantRegisterIndex == 0x40000000);
+ ASSERT(GlobalCode == 0);
+ ASSERT(EvalCode == 1);
+ ASSERT(FunctionCode == 2);
+
+ // FIXME: make these assertions less horrible.
+#if !ASSERT_DISABLED
+ Vector<int> testVector;
+ testVector.resize(42);
+ ASSERT(bitwise_cast<size_t*>(&testVector)[0] == 42);
+ ASSERT(bitwise_cast<int**>(&testVector)[1] == testVector.begin());
+#endif
+
+ ASSERT(StringImpl::s_hashFlag8BitBuffer == 64);
+}
+#if COMPILER(CLANG)
+#pragma clang diagnostic pop
+#endif
+
+Data::~Data()
+{
+ delete[] m_exceptionInstructions;
+ delete[] m_opcodeMap;
+}
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
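The Data constructor above fills the table that makes direct-threaded dispatch possible: each opcode ID indexes straight to the address of its handler label in LowLevelInterpreter.asm. A standalone sketch of the same idea, with function pointers standing in for the real llint_* assembly labels (all names below are illustrative):

// Sketch of an opcode map: dispatch is one indexed load plus an indirect call.
#include <cstddef>
#include <cstdio>

typedef void (*Handler)();

static void handleEnter() { std::printf("op_enter\n"); }
static void handleRet()   { std::printf("op_ret\n"); }

enum OpcodeID { op_enter, op_ret, numOpcodeIDs };

struct MiniData {
    Handler opcodeMap[numOpcodeIDs];
    MiniData()
    {
        // Analogue of the FOR_EACH_OPCODE_ID(OPCODE_ENTRY) expansion above.
        opcodeMap[op_enter] = handleEnter;
        opcodeMap[op_ret] = handleRet;
    }
};

int main()
{
    MiniData data;
    const OpcodeID stream[] = { op_enter, op_ret };
    for (size_t i = 0; i < sizeof(stream) / sizeof(stream[0]); ++i)
        data.opcodeMap[stream[i]](); // indexed load + indirect call
    return 0;
}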
diff --git a/Source/JavaScriptCore/llint/LLIntData.h b/Source/JavaScriptCore/llint/LLIntData.h
new file mode 100644
index 000000000..ba8daedf1
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntData.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntData_h
+#define LLIntData_h
+
+#include "Opcode.h"
+#include <wtf/Platform.h>
+
+namespace JSC {
+
+class JSGlobalData;
+struct Instruction;
+
+namespace LLInt {
+
+#if ENABLE(LLINT)
+class Data {
+public:
+ Data();
+ ~Data();
+
+ void performAssertions(JSGlobalData&);
+
+ Instruction* exceptionInstructions()
+ {
+ return m_exceptionInstructions;
+ }
+
+ Opcode* opcodeMap()
+ {
+ return m_opcodeMap;
+ }
+private:
+ Instruction* m_exceptionInstructions;
+ Opcode* m_opcodeMap;
+};
+#else // ENABLE(LLINT)
+
+#if COMPILER(CLANG)
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wmissing-noreturn"
+#endif
+
+class Data {
+public:
+ void performAssertions(JSGlobalData&) { }
+
+ Instruction* exceptionInstructions()
+ {
+ ASSERT_NOT_REACHED();
+ return 0;
+ }
+
+ Opcode* opcodeMap()
+ {
+ ASSERT_NOT_REACHED();
+ return 0;
+ }
+};
+
+#if COMPILER(CLANG)
+#pragma clang diagnostic pop
+#endif
+
+#endif // ENABLE(LLINT)
+
+} } // namespace JSC::LLInt
+
+#endif // LLIntData_h
+
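When ENABLE(LLINT) is off, the header swaps in a stub Data class with the same interface, so call sites such as globalData.llintData.performAssertions(...) compile unchanged, and any accidental use of the tables trips an assertion. A minimal sketch of that build-time stubbing pattern, under a hypothetical FEATURE_ENABLED flag, with assert approximating ASSERT_NOT_REACHED:

#include <cassert>
#include <cstdio>

#define FEATURE_ENABLED 0

#if FEATURE_ENABLED
class Data {
public:
    int* table() { return m_table; }
private:
    int m_table[4];
};
#else
class Data {
public:
    // Same interface, so call sites compile; reaching this is a logic error.
    int* table() { assert(false); return 0; }
};
#endif

int main()
{
    Data data;
    (void)data; // may be constructed freely; table() is only legal when enabled
    std::printf("compiles in both configurations\n");
    return 0;
}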
diff --git a/Source/JavaScriptCore/llint/LLIntEntrypoints.cpp b/Source/JavaScriptCore/llint/LLIntEntrypoints.cpp
new file mode 100644
index 000000000..f610f4b4c
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntEntrypoints.cpp
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "LLIntEntrypoints.h"
+
+#if ENABLE(LLINT)
+
+#include "JITCode.h"
+#include "JSGlobalData.h"
+#include "LLIntThunks.h"
+#include "LowLevelInterpreter.h"
+
+namespace JSC { namespace LLInt {
+
+void getFunctionEntrypoint(JSGlobalData& globalData, CodeSpecializationKind kind, JITCode& jitCode, MacroAssemblerCodePtr& arityCheck)
+{
+ if (!globalData.canUseJIT()) {
+ if (kind == CodeForCall) {
+ jitCode = JITCode::HostFunction(MacroAssemblerCodeRef::createSelfManagedCodeRef(MacroAssemblerCodePtr(bitwise_cast<void*>(&llint_function_for_call_prologue))));
+ arityCheck = MacroAssemblerCodePtr(bitwise_cast<void*>(&llint_function_for_call_arity_check));
+ return;
+ }
+
+ ASSERT(kind == CodeForConstruct);
+ jitCode = JITCode::HostFunction(MacroAssemblerCodeRef::createSelfManagedCodeRef(MacroAssemblerCodePtr(bitwise_cast<void*>(&llint_function_for_construct_prologue))));
+ arityCheck = MacroAssemblerCodePtr(bitwise_cast<void*>(&llint_function_for_construct_arity_check));
+ return;
+ }
+
+ if (kind == CodeForCall) {
+ jitCode = JITCode(globalData.getCTIStub(functionForCallEntryThunkGenerator), JITCode::InterpreterThunk);
+ arityCheck = globalData.getCTIStub(functionForCallArityCheckThunkGenerator).code();
+ return;
+ }
+
+ ASSERT(kind == CodeForConstruct);
+ jitCode = JITCode(globalData.getCTIStub(functionForConstructEntryThunkGenerator), JITCode::InterpreterThunk);
+ arityCheck = globalData.getCTIStub(functionForConstructArityCheckThunkGenerator).code();
+}
+
+void getEvalEntrypoint(JSGlobalData& globalData, JITCode& jitCode)
+{
+ if (!globalData.canUseJIT()) {
+ jitCode = JITCode::HostFunction(MacroAssemblerCodeRef::createSelfManagedCodeRef(MacroAssemblerCodePtr(bitwise_cast<void*>(&llint_eval_prologue))));
+ return;
+ }
+
+ jitCode = JITCode(globalData.getCTIStub(evalEntryThunkGenerator), JITCode::InterpreterThunk);
+}
+
+void getProgramEntrypoint(JSGlobalData& globalData, JITCode& jitCode)
+{
+ if (!globalData.canUseJIT()) {
+ jitCode = JITCode::HostFunction(MacroAssemblerCodeRef::createSelfManagedCodeRef(MacroAssemblerCodePtr(bitwise_cast<void*>(&llint_program_prologue))));
+ return;
+ }
+
+ jitCode = JITCode(globalData.getCTIStub(programEntryThunkGenerator), JITCode::InterpreterThunk);
+}
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
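Each getter above makes the same two-way choice: if executable memory is unavailable (canUseJIT() is false), hand back the raw LLInt assembly label; otherwise hand back a small JIT-compiled thunk that enters the same interpreter. A standalone sketch of that selection, assuming plain function pointers in place of JITCode and MacroAssemblerCodePtr (the names below are illustrative):

#include <cstdio>

static void interpreterPrologue() { std::printf("raw LLInt label\n"); }
static void jitEntryThunk()       { std::printf("JIT thunk into LLInt\n"); }

typedef void (*Entrypoint)();

static Entrypoint getProgramEntrypoint(bool canUseJIT)
{
    if (!canUseJIT)
        return interpreterPrologue; // no executable memory: enter directly
    return jitEntryThunk;           // thunk sets up state, then enters LLInt
}

int main()
{
    getProgramEntrypoint(false)();
    getProgramEntrypoint(true)();
    return 0;
}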
diff --git a/Source/JavaScriptCore/llint/LLIntEntrypoints.h b/Source/JavaScriptCore/llint/LLIntEntrypoints.h
new file mode 100644
index 000000000..dd7c27798
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntEntrypoints.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntEntrypoints_h
+#define LLIntEntrypoints_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(LLINT)
+
+#include "CodeSpecializationKind.h"
+
+namespace JSC {
+
+class EvalCodeBlock;
+class JITCode;
+class JSGlobalData;
+class MacroAssemblerCodePtr;
+class MacroAssemblerCodeRef;
+class ProgramCodeBlock;
+
+namespace LLInt {
+
+void getFunctionEntrypoint(JSGlobalData&, CodeSpecializationKind, JITCode&, MacroAssemblerCodePtr& arityCheck);
+void getEvalEntrypoint(JSGlobalData&, JITCode&);
+void getProgramEntrypoint(JSGlobalData&, JITCode&);
+
+inline void getEntrypoint(JSGlobalData& globalData, EvalCodeBlock*, JITCode& jitCode)
+{
+ getEvalEntrypoint(globalData, jitCode);
+}
+
+inline void getEntrypoint(JSGlobalData& globalData, ProgramCodeBlock*, JITCode& jitCode)
+{
+ getProgramEntrypoint(globalData, jitCode);
+}
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
+
+#endif // LLIntEntrypoints_h
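The two getEntrypoint() overloads exist so templated driver code can pass whatever code-block pointer it has and let overload resolution pick the right entry path; the pointer argument is never dereferenced, it only selects. A sketch of such a caller, using simplified stand-in types rather than the real JSC classes:

#include <cstdio>

struct EvalCodeBlock {};
struct ProgramCodeBlock {};
struct JITCode { const char* kind; };

static void getEvalEntrypoint(JITCode& jit)    { jit.kind = "eval"; }
static void getProgramEntrypoint(JITCode& jit) { jit.kind = "program"; }

// Overloads mirror LLInt::getEntrypoint: the unused pointer only selects.
static void getEntrypoint(EvalCodeBlock*, JITCode& jit)    { getEvalEntrypoint(jit); }
static void getEntrypoint(ProgramCodeBlock*, JITCode& jit) { getProgramEntrypoint(jit); }

template<typename CodeBlockType>
static void setUp(CodeBlockType* codeBlock)
{
    JITCode jit;
    getEntrypoint(codeBlock, jit); // overload resolution picks the right one
    std::printf("entrypoint kind: %s\n", jit.kind);
}

int main()
{
    EvalCodeBlock eval;
    ProgramCodeBlock program;
    setUp(&eval);
    setUp(&program);
    return 0;
}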
diff --git a/Source/JavaScriptCore/llint/LLIntExceptions.cpp b/Source/JavaScriptCore/llint/LLIntExceptions.cpp
new file mode 100644
index 000000000..a7d1a965a
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntExceptions.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "LLIntExceptions.h"
+
+#if ENABLE(LLINT)
+
+#include "CallFrame.h"
+#include "CodeBlock.h"
+#include "Instruction.h"
+#include "JITExceptions.h"
+#include "LLIntCommon.h"
+#include "LowLevelInterpreter.h"
+
+namespace JSC { namespace LLInt {
+
+void interpreterThrowInCaller(ExecState* exec, ReturnAddressPtr pc)
+{
+ JSGlobalData* globalData = &exec->globalData();
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Throwing exception %s.\n", globalData->exception.description());
+#endif
+ genericThrow(
+ globalData, exec, globalData->exception,
+ exec->codeBlock()->bytecodeOffset(exec, pc));
+}
+
+Instruction* returnToThrowForThrownException(ExecState* exec)
+{
+ return exec->globalData().llintData.exceptionInstructions();
+}
+
+Instruction* returnToThrow(ExecState* exec, Instruction* pc)
+{
+ JSGlobalData* globalData = &exec->globalData();
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Throwing exception %s (returnToThrow).\n", globalData->exception.description());
+#endif
+ genericThrow(globalData, exec, globalData->exception, pc - exec->codeBlock()->instructions().begin());
+
+ return globalData->llintData.exceptionInstructions();
+}
+
+void* callToThrow(ExecState* exec, Instruction* pc)
+{
+ JSGlobalData* globalData = &exec->globalData();
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Throwing exception %s (callToThrow).\n", globalData->exception.description());
+#endif
+ genericThrow(globalData, exec, globalData->exception, pc - exec->codeBlock()->instructions().begin());
+
+ return bitwise_cast<void*>(&llint_throw_during_call_trampoline);
+}
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
diff --git a/Source/JavaScriptCore/llint/LLIntExceptions.h b/Source/JavaScriptCore/llint/LLIntExceptions.h
new file mode 100644
index 000000000..3baa3f4a5
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntExceptions.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntExceptions_h
+#define LLIntExceptions_h
+
+#include <wtf/Platform.h>
+#include <wtf/StdLibExtras.h>
+
+#if ENABLE(LLINT)
+
+#include "MacroAssemblerCodeRef.h"
+
+namespace JSC {
+
+class ExecState;
+struct Instruction;
+
+namespace LLInt {
+
+// Throw the currently active exception in the context of the caller's call frame.
+void interpreterThrowInCaller(ExecState* callerFrame, ReturnAddressPtr);
+
+// Tells you where to jump to if you want to return-to-throw, after you've already
+// set up all information needed to throw the exception.
+Instruction* returnToThrowForThrownException(ExecState*);
+
+// Saves the current PC in the global data for safe-keeping, and gives you a PC
+// that you can tell the interpreter to go to, which when advanced between 1
+// and 9 slots will give you an "instruction" that threads to the interpreter's
+// exception handler. Note that if you give it the PC for exception handling,
+// it's smart enough to just return that PC without doing anything else; this
+// lets you thread exception handling through common helper functions used by
+// other helpers.
+Instruction* returnToThrow(ExecState*, Instruction*);
+
+// Use this when you're throwing to a call thunk.
+void* callToThrow(ExecState*, Instruction*);
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
+
+#endif // LLIntExceptions_h
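The comment above describes the key contract: returnToThrow() hands back a PC into the exception-instruction buffer, in which every slot is the throw trampoline, so however far the interpreter advances that PC (1 to 9 slots, i.e. up to maxOpcodeLength), it still lands on the handler. A standalone sketch of the trick, with function pointers in place of Instruction slots (names are illustrative):

#include <cstdio>

typedef void (*Handler)();
static void throwHandler() { std::printf("unwinding to exception handler\n"); }

static const int maxOpcodeLength = 9;
static Handler exceptionInstructions[maxOpcodeLength + 1];

// Analogue of returnToThrow: record where we were, then hand back a PC that
// can only thread to the throw handler.
static Handler* returnToThrow(int bytecodeOffset)
{
    std::printf("exception raised at bc#%d\n", bytecodeOffset);
    return exceptionInstructions;
}

int main()
{
    for (int i = 0; i < maxOpcodeLength + 1; ++i)
        exceptionInstructions[i] = throwHandler;

    Handler* pc = returnToThrow(42);
    pc += 3;   // interpreter advances past a 3-slot "instruction"
    (*pc)();   // still lands on the throw handler
    return 0;
}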
diff --git a/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h b/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h
new file mode 100644
index 000000000..9fe86fac4
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntOfflineAsmConfig.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntOfflineAsmConfig_h
+#define LLIntOfflineAsmConfig_h
+
+#include "LLIntCommon.h"
+#include <wtf/Assertions.h>
+#include <wtf/InlineASM.h>
+#include <wtf/Platform.h>
+
+#if CPU(X86)
+#define OFFLINE_ASM_X86 1
+#else
+#define OFFLINE_ASM_X86 0
+#endif
+
+#if CPU(ARM_THUMB2)
+#define OFFLINE_ASM_ARMv7 1
+#else
+#define OFFLINE_ASM_ARMv7 0
+#endif
+
+#if !ASSERT_DISABLED
+#define OFFLINE_ASM_ASSERT_ENABLED 1
+#else
+#define OFFLINE_ASM_ASSERT_ENABLED 0
+#endif
+
+#if CPU(BIG_ENDIAN)
+#define OFFLINE_ASM_BIG_ENDIAN 1
+#else
+#define OFFLINE_ASM_BIG_ENDIAN 0
+#endif
+
+#if LLINT_OSR_TO_JIT
+#define OFFLINE_ASM_JIT_ENABLED 1
+#else
+#define OFFLINE_ASM_JIT_ENABLED 0
+#endif
+
+#if LLINT_EXECUTION_TRACING
+#define OFFLINE_ASM_EXECUTION_TRACING 1
+#else
+#define OFFLINE_ASM_EXECUTION_TRACING 0
+#endif
+
+#if LLINT_ALWAYS_ALLOCATE_SLOW
+#define OFFLINE_ASM_ALWAYS_ALLOCATE_SLOW 1
+#else
+#define OFFLINE_ASM_ALWAYS_ALLOCATE_SLOW 0
+#endif
+
+#if CPU(ARM_THUMB2)
+#define OFFLINE_ASM_GLOBAL_LABEL(label) \
+ ".globl " SYMBOL_STRING(label) "\n" \
+ HIDE_SYMBOL(label) "\n" \
+ ".thumb\n" \
+ ".thumb_func " THUMB_FUNC_PARAM(label) "\n" \
+ SYMBOL_STRING(label) ":\n"
+#else
+#define OFFLINE_ASM_GLOBAL_LABEL(label) \
+ ".globl " SYMBOL_STRING(label) "\n" \
+ HIDE_SYMBOL(label) "\n" \
+ SYMBOL_STRING(label) ":\n"
+#endif
+
+#endif // LLIntOfflineAsmConfig_h
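OFFLINE_ASM_GLOBAL_LABEL stitches adjacent string literals into a single inline-assembly fragment that declares, hides, and defines a global label (note the argument to HIDE_SYMBOL must be the macro parameter `label`, as fixed above). A standalone sketch of the non-Thumb expansion, with simplified stand-ins for the WTF SYMBOL_STRING and HIDE_SYMBOL macros, whose real definitions vary per platform:

#include <cstdio>

#define SYMBOL_STRING(label) #label
#define HIDE_SYMBOL(label) ".hidden " #label

#define GLOBAL_LABEL(label) \
    ".globl " SYMBOL_STRING(label) "\n" \
    HIDE_SYMBOL(label) "\n" \
    SYMBOL_STRING(label) ":\n"

int main()
{
    // Adjacent string literals concatenate into one assembly fragment:
    //   .globl llint_begin
    //   .hidden llint_begin
    //   llint_begin:
    std::printf("%s", GLOBAL_LABEL(llint_begin));
    return 0;
}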
diff --git a/Source/JavaScriptCore/llint/LLIntOffsetsExtractor.cpp b/Source/JavaScriptCore/llint/LLIntOffsetsExtractor.cpp
new file mode 100644
index 000000000..5b76cd521
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntOffsetsExtractor.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#include "CodeBlock.h"
+#include "Executable.h"
+#include "Heap.h"
+#include "Interpreter.h"
+#include "JITStubs.h"
+#include "JSArray.h"
+#include "JSCell.h"
+#include "JSFunction.h"
+#include "JSGlobalData.h"
+#include "JSGlobalObject.h"
+#include "JSObject.h"
+#include "JSPropertyNameIterator.h"
+#include "JSString.h"
+#include "JSTypeInfo.h"
+#include "JSVariableObject.h"
+#include "JumpTable.h"
+#include "LLIntOfflineAsmConfig.h"
+#include "MarkedSpace.h"
+#include "RegisterFile.h"
+#include "ScopeChain.h"
+#include "Structure.h"
+#include "StructureChain.h"
+#include "ValueProfile.h"
+#include <wtf/text/StringImpl.h>
+
+namespace JSC {
+
+#define OFFLINE_ASM_OFFSETOF(clazz, field) OBJECT_OFFSETOF(clazz, field)
+
+class LLIntOffsetsExtractor {
+public:
+ static const unsigned* dummy();
+};
+
+const unsigned* LLIntOffsetsExtractor::dummy()
+{
+// This is a file generated by offlineasm/generate_offsets_extractor.rb, and contains code
+// to create a table of offsets, sizes, and a header identifying what combination of
+// Platform.h macros we have set. We include it inside of a method on LLIntOffsetsExtractor
+// because the fields whose offsets we're extracting are mostly private. So we make their
+// classes friends with LLIntOffsetsExtractor, and include the header here, to get the C++
+// compiler to kindly step aside and yield to our best intentions.
+#include "LLIntDesiredOffsets.h"
+ return extractorTable;
+}
+
+} // namespace JSC
+
+int main(int, char**)
+{
+ // Out of an abundance of caution, make sure that LLIntOffsetsExtractor::dummy() is live,
+ // and the extractorTable is live, too.
+ printf("%p\n", JSC::LLIntOffsetsExtractor::dummy());
+ return 0;
+}
+
+
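LLIntOffsetsExtractor works by befriending the classes whose private fields the offline assembler needs, then including a generated table of offset values inside a member function, where the friendship grants access. A self-contained sketch of the pattern; the class and field names below are invented, and the real table is the generated LLIntDesiredOffsets.h:

#include <cstddef>
#include <cstdio>

class CodeBlockLike {
    int m_numVars;       // private: only the extractor may measure these
    int m_numParameters;
    friend class OffsetsExtractor;
public:
    CodeBlockLike() : m_numVars(0), m_numParameters(0) {}
};

class OffsetsExtractor {
public:
    static const unsigned* dummy();
};

const unsigned* OffsetsExtractor::dummy()
{
    // Friendship makes the private member names accessible to offsetof here.
    static const unsigned table[] = {
        static_cast<unsigned>(offsetof(CodeBlockLike, m_numVars)),
        static_cast<unsigned>(offsetof(CodeBlockLike, m_numParameters)),
    };
    return table;
}

int main()
{
    const unsigned* table = OffsetsExtractor::dummy();
    std::printf("offsets: %u %u\n", table[0], table[1]);
    return 0;
}

A build step then scans the compiled object for the table, which is why main() above (like the real extractor's) prints the pointer: it keeps the table live against dead-code elimination.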
diff --git a/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp b/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp
new file mode 100644
index 000000000..3203d25d2
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntSlowPaths.cpp
@@ -0,0 +1,1558 @@
+/*
+ * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "LLIntSlowPaths.h"
+
+#if ENABLE(LLINT)
+
+#include "Arguments.h"
+#include "CallFrame.h"
+#include "CommonSlowPaths.h"
+#include "GetterSetter.h"
+#include "HostCallReturnValue.h"
+#include "Interpreter.h"
+#include "JIT.h"
+#include "JITDriver.h"
+#include "JSActivation.h"
+#include "JSByteArray.h"
+#include "JSGlobalObjectFunctions.h"
+#include "JSPropertyNameIterator.h"
+#include "JSStaticScopeObject.h"
+#include "JSString.h"
+#include "JSValue.h"
+#include "LLIntCommon.h"
+#include "LLIntExceptions.h"
+#include "LowLevelInterpreter.h"
+#include "Operations.h"
+
+namespace JSC { namespace LLInt {
+
+#define LLINT_BEGIN_NO_SET_PC() \
+ JSGlobalData& globalData = exec->globalData(); \
+ NativeCallFrameTracer tracer(&globalData, exec)
+
+#define LLINT_SET_PC_FOR_STUBS() \
+ exec->setCurrentVPC(pc + 1)
+
+#define LLINT_BEGIN() \
+ LLINT_BEGIN_NO_SET_PC(); \
+ LLINT_SET_PC_FOR_STUBS()
+
+#define LLINT_OP(index) (exec->uncheckedR(pc[index].u.operand))
+#define LLINT_OP_C(index) (exec->r(pc[index].u.operand))
+
+#define LLINT_RETURN_TWO(first, second) do { \
+ union { \
+ struct { \
+ void* a; \
+ void* b; \
+ } pair; \
+ int64_t i; \
+ } __rt_u; \
+ __rt_u.pair.a = first; \
+ __rt_u.pair.b = second; \
+ return __rt_u.i; \
+ } while (false)
+
+#define LLINT_END_IMPL() LLINT_RETURN_TWO(pc, exec)
+
+#define LLINT_THROW(exceptionToThrow) do { \
+ globalData.exception = (exceptionToThrow); \
+ pc = returnToThrow(exec, pc); \
+ LLINT_END_IMPL(); \
+ } while (false)
+
+#define LLINT_CHECK_EXCEPTION() do { \
+ if (UNLIKELY(globalData.exception)) { \
+ pc = returnToThrow(exec, pc); \
+ LLINT_END_IMPL(); \
+ } \
+ } while (false)
+
+#define LLINT_END() do { \
+ LLINT_CHECK_EXCEPTION(); \
+ LLINT_END_IMPL(); \
+ } while (false)
+
+#define LLINT_BRANCH(opcode, condition) do { \
+ bool __b_condition = (condition); \
+ LLINT_CHECK_EXCEPTION(); \
+ if (__b_condition) \
+ pc += pc[OPCODE_LENGTH(opcode) - 1].u.operand; \
+ else \
+ pc += OPCODE_LENGTH(opcode); \
+ LLINT_END_IMPL(); \
+ } while (false)
+
+#define LLINT_RETURN(value) do { \
+ JSValue __r_returnValue = (value); \
+ LLINT_CHECK_EXCEPTION(); \
+ LLINT_OP(1) = __r_returnValue; \
+ LLINT_END_IMPL(); \
+ } while (false)
+
+#define LLINT_RETURN_PROFILED(opcode, value) do { \
+ JSValue __rp_returnValue = (value); \
+ LLINT_CHECK_EXCEPTION(); \
+ LLINT_OP(1) = __rp_returnValue; \
+ pc[OPCODE_LENGTH(opcode) - 1].u.profile->m_buckets[0] = \
+ JSValue::encode(__rp_returnValue); \
+ LLINT_END_IMPL(); \
+ } while (false)
+
+#define LLINT_CALL_END_IMPL(exec, callTarget) LLINT_RETURN_TWO((callTarget), (exec))
+
+#define LLINT_CALL_THROW(exec, pc, exceptionToThrow) do { \
+ ExecState* __ct_exec = (exec); \
+ Instruction* __ct_pc = (pc); \
+ globalData.exception = (exceptionToThrow); \
+ LLINT_CALL_END_IMPL(__ct_exec, callToThrow(__ct_exec, __ct_pc)); \
+ } while (false)
+
+#define LLINT_CALL_CHECK_EXCEPTION(exec, pc) do { \
+ ExecState* __cce_exec = (exec); \
+ Instruction* __cce_pc = (pc); \
+ if (UNLIKELY(globalData.exception)) \
+ LLINT_CALL_END_IMPL(__cce_exec, callToThrow(__cce_exec, __cce_pc)); \
+ } while (false)
+
+#define LLINT_CALL_RETURN(exec, pc, callTarget) do { \
+ ExecState* __cr_exec = (exec); \
+ Instruction* __cr_pc = (pc); \
+ void* __cr_callTarget = (callTarget); \
+ LLINT_CALL_CHECK_EXCEPTION(__cr_exec->callerFrame(), __cr_pc); \
+ LLINT_CALL_END_IMPL(__cr_exec, __cr_callTarget); \
+ } while (false)
+
+extern "C" SlowPathReturnType llint_trace_operand(ExecState* exec, Instruction* pc, int fromWhere, int operand)
+{
+ LLINT_BEGIN();
+ dataLog("%p / %p: executing bc#%zu, op#%u: Trace(%d): %d: %d\n",
+ exec->codeBlock(),
+ exec,
+ static_cast<intptr_t>(pc - exec->codeBlock()->instructions().begin()),
+ exec->globalData().interpreter->getOpcodeID(pc[0].u.opcode),
+ fromWhere,
+ operand,
+ pc[operand].u.operand);
+ LLINT_END();
+}
+
+extern "C" SlowPathReturnType llint_trace_value(ExecState* exec, Instruction* pc, int fromWhere, int operand)
+{
+ LLINT_BEGIN();
+ JSValue value = LLINT_OP_C(operand).jsValue();
+ union {
+ struct {
+ uint32_t tag;
+ uint32_t payload;
+ } bits;
+ EncodedJSValue asValue;
+ } u;
+ u.asValue = JSValue::encode(value);
+ dataLog("%p / %p: executing bc#%zu, op#%u: Trace(%d): %d: %d: %08x:%08x: %s\n",
+ exec->codeBlock(),
+ exec,
+ static_cast<intptr_t>(pc - exec->codeBlock()->instructions().begin()),
+ exec->globalData().interpreter->getOpcodeID(pc[0].u.opcode),
+ fromWhere,
+ operand,
+ pc[operand].u.operand,
+ u.bits.tag,
+ u.bits.payload,
+ value.description());
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(trace_prologue)
+{
+ LLINT_BEGIN();
+ dataLog("%p / %p: in prologue.\n", exec->codeBlock(), exec);
+ LLINT_END();
+}
+
+static void traceFunctionPrologue(ExecState* exec, const char* comment, CodeSpecializationKind kind)
+{
+ JSFunction* callee = asFunction(exec->callee());
+ FunctionExecutable* executable = callee->jsExecutable();
+ CodeBlock* codeBlock = &executable->generatedBytecodeFor(kind);
+ dataLog("%p / %p: in %s of function %p, executable %p; numVars = %u, numParameters = %u, numCalleeRegisters = %u, caller = %p.\n",
+ codeBlock, exec, comment, callee, executable,
+ codeBlock->m_numVars, codeBlock->numParameters(), codeBlock->m_numCalleeRegisters,
+ exec->callerFrame());
+}
+
+LLINT_SLOW_PATH_DECL(trace_prologue_function_for_call)
+{
+ LLINT_BEGIN();
+ traceFunctionPrologue(exec, "call prologue", CodeForCall);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(trace_prologue_function_for_construct)
+{
+ LLINT_BEGIN();
+ traceFunctionPrologue(exec, "construct prologue", CodeForConstruct);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(trace_arityCheck_for_call)
+{
+ LLINT_BEGIN();
+ traceFunctionPrologue(exec, "call arity check", CodeForCall);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(trace_arityCheck_for_construct)
+{
+ LLINT_BEGIN();
+ traceFunctionPrologue(exec, "construct arity check", CodeForConstruct);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(trace)
+{
+ LLINT_BEGIN();
+ dataLog("%p / %p: executing bc#%zu, %s, scope %p\n",
+ exec->codeBlock(),
+ exec,
+ static_cast<intptr_t>(pc - exec->codeBlock()->instructions().begin()),
+ opcodeNames[exec->globalData().interpreter->getOpcodeID(pc[0].u.opcode)],
+ exec->scopeChain());
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(special_trace)
+{
+ LLINT_BEGIN();
+ dataLog("%p / %p: executing special case bc#%zu, op#%u, return PC is %p\n",
+ exec->codeBlock(),
+ exec,
+ static_cast<intptr_t>(pc - exec->codeBlock()->instructions().begin()),
+ exec->globalData().interpreter->getOpcodeID(pc[0].u.opcode),
+ exec->returnPC().value());
+ LLINT_END();
+}
+
+inline bool shouldJIT(ExecState* exec)
+{
+ // You can modify this to turn off JITting without rebuilding the world.
+ return exec->globalData().canUseJIT();
+}
+
+enum EntryKind { Prologue, ArityCheck };
+static SlowPathReturnType entryOSR(ExecState* exec, Instruction* pc, CodeBlock* codeBlock, const char *name, EntryKind kind)
+{
+#if ENABLE(JIT_VERBOSE_OSR)
+ dataLog("%p: Entered %s with executeCounter = %d\n", codeBlock, name, codeBlock->llintExecuteCounter());
+#endif
+
+ if (!shouldJIT(exec)) {
+ codeBlock->dontJITAnytimeSoon();
+ LLINT_RETURN_TWO(0, exec);
+ }
+ if (!codeBlock->jitCompile(exec->globalData())) {
+#if ENABLE(JIT_VERBOSE_OSR)
+ dataLog(" Code was already compiled.\n");
+#endif
+ }
+ codeBlock->jitSoon();
+ if (kind == Prologue)
+ LLINT_RETURN_TWO(codeBlock->getJITCode().executableAddressAtOffset(0), exec);
+ ASSERT(kind == ArityCheck);
+ LLINT_RETURN_TWO(codeBlock->getJITCodeWithArityCheck().executableAddress(), exec);
+}
+
+LLINT_SLOW_PATH_DECL(entry_osr)
+{
+ return entryOSR(exec, pc, exec->codeBlock(), "entry_osr", Prologue);
+}
+
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_call)
+{
+ return entryOSR(exec, pc, &asFunction(exec->callee())->jsExecutable()->generatedBytecodeFor(CodeForCall), "entry_osr_function_for_call", Prologue);
+}
+
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_construct)
+{
+ return entryOSR(exec, pc, &asFunction(exec->callee())->jsExecutable()->generatedBytecodeFor(CodeForConstruct), "entry_osr_function_for_construct", Prologue);
+}
+
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_call_arityCheck)
+{
+ return entryOSR(exec, pc, &asFunction(exec->callee())->jsExecutable()->generatedBytecodeFor(CodeForCall), "entry_osr_function_for_call_arityCheck", ArityCheck);
+}
+
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_construct_arityCheck)
+{
+ return entryOSR(exec, pc, &asFunction(exec->callee())->jsExecutable()->generatedBytecodeFor(CodeForConstruct), "entry_osr_function_for_construct_arityCheck", ArityCheck);
+}
+
+LLINT_SLOW_PATH_DECL(loop_osr)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+
+#if ENABLE(JIT_VERBOSE_OSR)
+ dataLog("%p: Entered loop_osr with executeCounter = %d\n", codeBlock, codeBlock->llintExecuteCounter());
+#endif
+
+ if (!shouldJIT(exec)) {
+ codeBlock->dontJITAnytimeSoon();
+ LLINT_RETURN_TWO(0, exec);
+ }
+
+ if (!codeBlock->jitCompile(exec->globalData())) {
+#if ENABLE(JIT_VERBOSE_OSR)
+ dataLog(" Code was already compiled.\n");
+#endif
+ }
+ codeBlock->jitSoon();
+
+ ASSERT(codeBlock->getJITType() == JITCode::BaselineJIT);
+
+ Vector<BytecodeAndMachineOffset> map;
+ codeBlock->jitCodeMap()->decode(map);
+ BytecodeAndMachineOffset* mapping = binarySearch<BytecodeAndMachineOffset, unsigned, BytecodeAndMachineOffset::getBytecodeIndex>(map.begin(), map.size(), pc - codeBlock->instructions().begin());
+ ASSERT(mapping);
+ ASSERT(mapping->m_bytecodeIndex == static_cast<unsigned>(pc - codeBlock->instructions().begin()));
+
+ void* jumpTarget = codeBlock->getJITCode().executableAddressAtOffset(mapping->m_machineCodeOffset);
+ ASSERT(jumpTarget);
+
+ LLINT_RETURN_TWO(jumpTarget, exec);
+}
+
+LLINT_SLOW_PATH_DECL(replace)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+
+#if ENABLE(JIT_VERBOSE_OSR)
+ dataLog("%p: Entered replace with executeCounter = %d\n", codeBlock, codeBlock->llintExecuteCounter());
+#endif
+
+ if (shouldJIT(exec)) {
+ if (!codeBlock->jitCompile(exec->globalData())) {
+#if ENABLE(JIT_VERBOSE_OSR)
+ dataLog(" Code was already compiled.\n");
+#endif
+ }
+ codeBlock->jitSoon();
+ } else
+ codeBlock->dontJITAnytimeSoon();
+ LLINT_END_IMPL();
+}
+
+LLINT_SLOW_PATH_DECL(register_file_check)
+{
+ LLINT_BEGIN();
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Checking stack height with exec = %p.\n", exec);
+ dataLog("CodeBlock = %p.\n", exec->codeBlock());
+ dataLog("Num callee registers = %u.\n", exec->codeBlock()->m_numCalleeRegisters);
+ dataLog("Num vars = %u.\n", exec->codeBlock()->m_numVars);
+ dataLog("Current end is at %p.\n", exec->globalData().interpreter->registerFile().end());
+#endif
+ ASSERT(&exec->registers()[exec->codeBlock()->m_numCalleeRegisters] > exec->globalData().interpreter->registerFile().end());
+ if (UNLIKELY(!globalData.interpreter->registerFile().grow(&exec->registers()[exec->codeBlock()->m_numCalleeRegisters]))) {
+ ReturnAddressPtr returnPC = exec->returnPC();
+ exec = exec->callerFrame();
+ globalData.exception = createStackOverflowError(exec);
+ interpreterThrowInCaller(exec, returnPC);
+ pc = returnToThrowForThrownException(exec);
+ }
+ LLINT_END_IMPL();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_call_arityCheck)
+{
+ LLINT_BEGIN();
+ ExecState* newExec = CommonSlowPaths::arityCheckFor(exec, &globalData.interpreter->registerFile(), CodeForCall);
+ if (!newExec) {
+ ReturnAddressPtr returnPC = exec->returnPC();
+ exec = exec->callerFrame();
+ globalData.exception = createStackOverflowError(exec);
+ interpreterThrowInCaller(exec, returnPC);
+ LLINT_RETURN_TWO(bitwise_cast<void*>(static_cast<uintptr_t>(1)), exec);
+ }
+ LLINT_RETURN_TWO(0, newExec);
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_construct_arityCheck)
+{
+ LLINT_BEGIN();
+ ExecState* newExec = CommonSlowPaths::arityCheckFor(exec, &globalData.interpreter->registerFile(), CodeForConstruct);
+ if (!newExec) {
+ ReturnAddressPtr returnPC = exec->returnPC();
+ exec = exec->callerFrame();
+ globalData.exception = createStackOverflowError(exec);
+ interpreterThrowInCaller(exec, returnPC);
+ LLINT_RETURN_TWO(bitwise_cast<void*>(static_cast<uintptr_t>(1)), exec);
+ }
+ LLINT_RETURN_TWO(0, newExec);
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_create_activation)
+{
+ LLINT_BEGIN();
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Creating an activation, exec = %p!\n", exec);
+#endif
+ JSActivation* activation = JSActivation::create(globalData, exec, static_cast<FunctionExecutable*>(exec->codeBlock()->ownerExecutable()));
+ exec->setScopeChain(exec->scopeChain()->push(activation));
+ LLINT_RETURN(JSValue(activation));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_create_arguments)
+{
+ LLINT_BEGIN();
+ JSValue arguments = JSValue(Arguments::create(globalData, exec));
+ LLINT_CHECK_EXCEPTION();
+ exec->uncheckedR(pc[1].u.operand) = arguments;
+ exec->uncheckedR(unmodifiedArgumentsRegister(pc[1].u.operand)) = arguments;
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_create_this)
+{
+ LLINT_BEGIN();
+ JSFunction* constructor = asFunction(exec->callee());
+
+#if !ASSERT_DISABLED
+ ConstructData constructData;
+ ASSERT(constructor->methodTable()->getConstructData(constructor, constructData) == ConstructTypeJS);
+#endif
+
+ Structure* structure;
+ JSValue proto = LLINT_OP(2).jsValue();
+ if (proto.isObject())
+ structure = asObject(proto)->inheritorID(globalData);
+ else
+ structure = constructor->scope()->globalObject->emptyObjectStructure();
+
+ LLINT_RETURN(constructEmptyObject(exec, structure));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_convert_this)
+{
+ LLINT_BEGIN();
+ JSValue v1 = LLINT_OP(1).jsValue();
+ ASSERT(v1.isPrimitive());
+ LLINT_RETURN(v1.toThisObject(exec));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_new_object)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(constructEmptyObject(exec));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_new_array)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(constructArray(exec, bitwise_cast<JSValue*>(&LLINT_OP(2)), pc[3].u.operand));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_new_array_buffer)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(constructArray(exec, exec->codeBlock()->constantBuffer(pc[2].u.operand), pc[3].u.operand));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_new_regexp)
+{
+ LLINT_BEGIN();
+ RegExp* regExp = exec->codeBlock()->regexp(pc[2].u.operand);
+ if (!regExp->isValid())
+ LLINT_THROW(createSyntaxError(exec, "Invalid flag supplied to RegExp constructor."));
+ LLINT_RETURN(RegExpObject::create(globalData, exec->lexicalGlobalObject(), exec->lexicalGlobalObject()->regExpStructure(), regExp));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_not)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(!LLINT_OP_C(2).jsValue().toBoolean(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_eq)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(JSValue::equal(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_neq)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(!JSValue::equal(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_stricteq)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(JSValue::strictEqual(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_nstricteq)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(!JSValue::strictEqual(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_less)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(jsLess<true>(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_lesseq)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(jsLessEq<true>(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_greater)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(jsLess<false>(exec, LLINT_OP_C(3).jsValue(), LLINT_OP_C(2).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_greatereq)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(jsLessEq<false>(exec, LLINT_OP_C(3).jsValue(), LLINT_OP_C(2).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_pre_inc)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP(1).jsValue().toNumber(exec) + 1));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_pre_dec)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP(1).jsValue().toNumber(exec) - 1));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_post_inc)
+{
+ LLINT_BEGIN();
+ double result = LLINT_OP(2).jsValue().toNumber(exec);
+ LLINT_OP(2) = jsNumber(result + 1);
+ LLINT_RETURN(jsNumber(result));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_post_dec)
+{
+ LLINT_BEGIN();
+ double result = LLINT_OP(2).jsValue().toNumber(exec);
+ LLINT_OP(2) = jsNumber(result - 1);
+ LLINT_RETURN(jsNumber(result));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_to_jsnumber)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toNumber(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_negate)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(-LLINT_OP_C(2).jsValue().toNumber(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_add)
+{
+ LLINT_BEGIN();
+ JSValue v1 = LLINT_OP_C(2).jsValue();
+ JSValue v2 = LLINT_OP_C(3).jsValue();
+
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Trying to add %s", v1.description());
+ dataLog(" to %s.\n", v2.description());
+#endif
+
+ if (v1.isString() && !v2.isObject())
+ LLINT_RETURN(jsString(exec, asString(v1), v2.toString(exec)));
+
+ if (v1.isNumber() && v2.isNumber())
+ LLINT_RETURN(jsNumber(v1.asNumber() + v2.asNumber()));
+
+ LLINT_RETURN(jsAddSlowCase(exec, v1, v2));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_mul)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toNumber(exec) * LLINT_OP_C(3).jsValue().toNumber(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_sub)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toNumber(exec) - LLINT_OP_C(3).jsValue().toNumber(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_div)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toNumber(exec) / LLINT_OP_C(3).jsValue().toNumber(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_mod)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(fmod(LLINT_OP_C(2).jsValue().toNumber(exec), LLINT_OP_C(3).jsValue().toNumber(exec))));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_lshift)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toInt32(exec) << (LLINT_OP_C(3).jsValue().toUInt32(exec) & 31)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_rshift)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toInt32(exec) >> (LLINT_OP_C(3).jsValue().toUInt32(exec) & 31)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_urshift)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toUInt32(exec) >> (LLINT_OP_C(3).jsValue().toUInt32(exec) & 31)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_bitand)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toInt32(exec) & LLINT_OP_C(3).jsValue().toInt32(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_bitor)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toInt32(exec) | LLINT_OP_C(3).jsValue().toInt32(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_bitxor)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(LLINT_OP_C(2).jsValue().toInt32(exec) ^ LLINT_OP_C(3).jsValue().toInt32(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_bitnot)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsNumber(~LLINT_OP_C(2).jsValue().toInt32(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_check_has_instance)
+{
+ LLINT_BEGIN();
+ JSValue baseVal = LLINT_OP_C(1).jsValue();
+#ifndef NDEBUG
+ TypeInfo typeInfo(UnspecifiedType);
+ ASSERT(!baseVal.isObject()
+ || !(typeInfo = asObject(baseVal)->structure()->typeInfo()).implementsHasInstance());
+#endif
+ LLINT_THROW(createInvalidParamError(exec, "instanceof", baseVal));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_instanceof)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(CommonSlowPaths::opInstanceOfSlow(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue(), LLINT_OP_C(4).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_typeof)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsTypeStringForValue(exec, LLINT_OP_C(2).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_is_undefined)
+{
+ LLINT_BEGIN();
+ JSValue v = LLINT_OP_C(2).jsValue();
+ LLINT_RETURN(jsBoolean(v.isCell() ? v.asCell()->structure()->typeInfo().masqueradesAsUndefined() : v.isUndefined()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_is_boolean)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(LLINT_OP_C(2).jsValue().isBoolean()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_is_number)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(LLINT_OP_C(2).jsValue().isNumber()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_is_string)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(isJSString(LLINT_OP_C(2).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_is_object)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(jsIsObjectType(LLINT_OP_C(2).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_is_function)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(jsIsFunctionType(LLINT_OP_C(2).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_in)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsBoolean(CommonSlowPaths::opIn(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue())));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN_PROFILED(op_resolve, CommonSlowPaths::opResolve(exec, exec->codeBlock()->identifier(pc[2].u.operand)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_skip)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN_PROFILED(
+ op_resolve_skip,
+ CommonSlowPaths::opResolveSkip(
+ exec,
+ exec->codeBlock()->identifier(pc[2].u.operand),
+ pc[3].u.operand));
+}
+
+static JSValue resolveGlobal(ExecState* exec, Instruction* pc)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+ JSGlobalObject* globalObject = codeBlock->globalObject();
+ ASSERT(globalObject->isGlobalObject());
+ int property = pc[2].u.operand;
+ Structure* structure = pc[3].u.structure.get();
+
+ ASSERT_UNUSED(structure, structure != globalObject->structure());
+
+ Identifier& ident = codeBlock->identifier(property);
+ PropertySlot slot(globalObject);
+
+ if (globalObject->getPropertySlot(exec, ident, slot)) {
+ JSValue result = slot.getValue(exec, ident);
+ if (slot.isCacheableValue() && !globalObject->structure()->isUncacheableDictionary()
+ && slot.slotBase() == globalObject) {
+ pc[3].u.structure.set(
+ exec->globalData(), codeBlock->ownerExecutable(), globalObject->structure());
+ pc[4] = slot.cachedOffset();
+ }
+
+ return result;
+ }
+
+ exec->globalData().exception = createUndefinedVariableError(exec, ident);
+ return JSValue();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_global)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN_PROFILED(op_resolve_global, resolveGlobal(exec, pc));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_global_dynamic)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN_PROFILED(op_resolve_global_dynamic, resolveGlobal(exec, pc));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_for_resolve_global_dynamic)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN_PROFILED(op_resolve_global_dynamic, CommonSlowPaths::opResolve(exec, exec->codeBlock()->identifier(pc[2].u.operand)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_base)
+{
+ LLINT_BEGIN();
+ Identifier& ident = exec->codeBlock()->identifier(pc[2].u.operand);
+ if (pc[3].u.operand) {
+ JSValue base = JSC::resolveBase(exec, ident, exec->scopeChain(), true);
+ if (!base)
+ LLINT_THROW(createErrorForInvalidGlobalAssignment(exec, ident.ustring()));
+ LLINT_RETURN(base);
+ }
+
+ LLINT_RETURN_PROFILED(op_resolve_base, JSC::resolveBase(exec, ident, exec->scopeChain(), false));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_ensure_property_exists)
+{
+ LLINT_BEGIN();
+ JSObject* object = asObject(LLINT_OP(1).jsValue());
+ PropertySlot slot(object);
+ Identifier& ident = exec->codeBlock()->identifier(pc[2].u.operand);
+ if (!object->getPropertySlot(exec, ident, slot))
+ LLINT_THROW(createErrorForInvalidGlobalAssignment(exec, ident.ustring()));
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_with_base)
+{
+ LLINT_BEGIN();
+ JSValue result = CommonSlowPaths::opResolveWithBase(exec, exec->codeBlock()->identifier(pc[3].u.operand), LLINT_OP(1));
+ LLINT_CHECK_EXCEPTION();
+ LLINT_OP(2) = result;
+ // FIXME: technically should have profiling, but we don't do it because the DFG won't use it.
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_resolve_with_this)
+{
+ LLINT_BEGIN();
+ JSValue result = CommonSlowPaths::opResolveWithThis(exec, exec->codeBlock()->identifier(pc[3].u.operand), LLINT_OP(1));
+ LLINT_CHECK_EXCEPTION();
+ LLINT_OP(2) = result;
+ // FIXME: technically should have profiling, but we don't do it because the DFG won't use it.
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_get_by_id)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ Identifier& ident = codeBlock->identifier(pc[3].u.operand);
+ JSValue baseValue = LLINT_OP_C(2).jsValue();
+ PropertySlot slot(baseValue);
+
+ JSValue result = baseValue.get(exec, ident, slot);
+ LLINT_CHECK_EXCEPTION();
+ LLINT_OP(1) = result;
+
+ if (baseValue.isCell()
+ && slot.isCacheable()
+ && slot.slotBase() == baseValue
+ && slot.cachedPropertyType() == PropertySlot::Value) {
+
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+
+ if (!structure->isUncacheableDictionary()
+ && !structure->typeInfo().prohibitsPropertyCaching()) {
+ pc[4].u.structure.set(
+ globalData, codeBlock->ownerExecutable(), structure);
+ pc[5].u.operand = slot.cachedOffset() * sizeof(JSValue);
+ }
+ }
+
+ pc[OPCODE_LENGTH(op_get_by_id) - 1].u.profile->m_buckets[0] = JSValue::encode(result);
+ LLINT_END();
+}
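+
+// Note the cache shape this populates: pc[4] holds the expected Structure and
+// pc[5] the property offset, pre-scaled here to a byte offset (unlike
+// resolveGlobal() above, which caches the raw offset), so the fast path can
+// add it to the property storage pointer without shifting. The store into the
+// value profile bucket on the last line records the result so the optimizing
+// JIT can later see what this site tends to return.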
+
+LLINT_SLOW_PATH_DECL(slow_path_get_arguments_length)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ Identifier& ident = codeBlock->identifier(pc[3].u.operand);
+ JSValue baseValue = LLINT_OP(2).jsValue();
+ PropertySlot slot(baseValue);
+ LLINT_RETURN(baseValue.get(exec, ident, slot));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_put_by_id)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ Identifier& ident = codeBlock->identifier(pc[2].u.operand);
+
+ JSValue baseValue = LLINT_OP_C(1).jsValue();
+ PutPropertySlot slot(codeBlock->isStrictMode());
+ if (pc[8].u.operand)
+ asObject(baseValue)->putDirect(globalData, ident, LLINT_OP_C(3).jsValue(), slot);
+ else
+ baseValue.put(exec, ident, LLINT_OP_C(3).jsValue(), slot);
+ LLINT_CHECK_EXCEPTION();
+
+ if (baseValue.isCell()
+ && slot.isCacheable()) {
+
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+
+ if (!structure->isUncacheableDictionary()
+ && !structure->typeInfo().prohibitsPropertyCaching()
+ && baseCell == slot.base()) {
+
+ if (slot.type() == PutPropertySlot::NewProperty) {
+ if (!structure->isDictionary() && structure->previousID()->propertyStorageCapacity() == structure->propertyStorageCapacity()) {
+ // This is needed because some of the methods we call
+ // below may GC.
+ pc[0].u.opcode = bitwise_cast<void*>(&llint_op_put_by_id);
+
+ normalizePrototypeChain(exec, baseCell);
+
+ ASSERT(structure->previousID()->isObject());
+ pc[4].u.structure.set(
+ globalData, codeBlock->ownerExecutable(), structure->previousID());
+ pc[5].u.operand = slot.cachedOffset() * sizeof(JSValue);
+ pc[6].u.structure.set(
+ globalData, codeBlock->ownerExecutable(), structure);
+ StructureChain* chain = structure->prototypeChain(exec);
+ ASSERT(chain);
+ pc[7].u.structureChain.set(
+ globalData, codeBlock->ownerExecutable(), chain);
+
+ if (pc[8].u.operand)
+ pc[0].u.opcode = bitwise_cast<void*>(&llint_op_put_by_id_transition_direct);
+ else
+ pc[0].u.opcode = bitwise_cast<void*>(&llint_op_put_by_id_transition_normal);
+ }
+ } else {
+ pc[0].u.opcode = bitwise_cast<void*>(&llint_op_put_by_id);
+ pc[4].u.structure.set(
+ globalData, codeBlock->ownerExecutable(), structure);
+ pc[5].u.operand = slot.cachedOffset() * sizeof(JSValue);
+ }
+ }
+ }
+
+ LLINT_END();
+}
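+
+// In the transition case above, the bytecode is rewritten in place: pc[4..7]
+// record the old Structure, the byte offset, the new Structure, and the
+// prototype chain to revalidate, and pc[0] is repointed at one of the
+// specialized put_by_id_transition opcodes. Resetting pc[0] to plain
+// op_put_by_id first is the GC-safety trick the inline comment alludes to:
+// if normalizePrototypeChain() or prototypeChain() triggers a collection
+// midway, the instruction is left in a consistent, merely-uncached state.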
+
+LLINT_SLOW_PATH_DECL(slow_path_del_by_id)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ JSObject* baseObject = LLINT_OP_C(2).jsValue().toObject(exec);
+ bool couldDelete = baseObject->methodTable()->deleteProperty(baseObject, exec, codeBlock->identifier(pc[3].u.operand));
+ LLINT_CHECK_EXCEPTION();
+ if (!couldDelete && codeBlock->isStrictMode())
+ LLINT_THROW(createTypeError(exec, "Unable to delete property."));
+ LLINT_RETURN(jsBoolean(couldDelete));
+}
+
+inline JSValue getByVal(ExecState* exec, JSValue baseValue, JSValue subscript)
+{
+ if (LIKELY(baseValue.isCell() && subscript.isString())) {
+ if (JSValue result = baseValue.asCell()->fastGetOwnProperty(exec, asString(subscript)->value(exec)))
+ return result;
+ }
+
+ if (subscript.isUInt32()) {
+ uint32_t i = subscript.asUInt32();
+ if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
+ return asString(baseValue)->getIndex(exec, i);
+
+ if (isJSByteArray(baseValue) && asByteArray(baseValue)->canAccessIndex(i))
+ return asByteArray(baseValue)->getIndex(exec, i);
+
+ return baseValue.get(exec, i);
+ }
+
+ Identifier property(exec, subscript.toString(exec)->value(exec));
+ return baseValue.get(exec, property);
+}
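+
+// getByVal() tries progressively more generic lookups: a by-name fast path
+// for string subscripts on cells, then the indexed fast paths for strings and
+// byte arrays, then a generic indexed get, and finally a full get by
+// Identifier after stringifying the subscript.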
+
+LLINT_SLOW_PATH_DECL(slow_path_get_by_val)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN_PROFILED(op_get_by_val, getByVal(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_get_argument_by_val)
+{
+ LLINT_BEGIN();
+ JSValue arguments = LLINT_OP(2).jsValue();
+ if (!arguments) {
+ arguments = Arguments::create(globalData, exec);
+ LLINT_CHECK_EXCEPTION();
+ LLINT_OP(2) = arguments;
+ exec->uncheckedR(unmodifiedArgumentsRegister(pc[2].u.operand)) = arguments;
+ }
+
+ LLINT_RETURN(getByVal(exec, arguments, LLINT_OP_C(3).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_get_by_pname)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(getByVal(exec, LLINT_OP(2).jsValue(), LLINT_OP(3).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_put_by_val)
+{
+ LLINT_BEGIN();
+
+ JSValue baseValue = LLINT_OP_C(1).jsValue();
+ JSValue subscript = LLINT_OP_C(2).jsValue();
+ JSValue value = LLINT_OP_C(3).jsValue();
+
+ if (LIKELY(subscript.isUInt32())) {
+ uint32_t i = subscript.asUInt32();
+ if (isJSArray(baseValue)) {
+ JSArray* jsArray = asArray(baseValue);
+ if (jsArray->canSetIndex(i))
+ jsArray->setIndex(globalData, i, value);
+ else
+ JSArray::putByIndex(jsArray, exec, i, value);
+ LLINT_END();
+ }
+ if (isJSByteArray(baseValue)
+ && asByteArray(baseValue)->canAccessIndex(i)) {
+ JSByteArray* jsByteArray = asByteArray(baseValue);
+ if (value.isInt32()) {
+ jsByteArray->setIndex(i, value.asInt32());
+ LLINT_END();
+ }
+ if (value.isNumber()) {
+ jsByteArray->setIndex(i, value.asNumber());
+ LLINT_END();
+ }
+ }
+ baseValue.put(exec, i, value);
+ LLINT_END();
+ }
+
+ Identifier property(exec, subscript.toString(exec)->value(exec));
+ LLINT_CHECK_EXCEPTION();
+ PutPropertySlot slot(exec->codeBlock()->isStrictMode());
+ baseValue.put(exec, property, value, slot);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_del_by_val)
+{
+ LLINT_BEGIN();
+ JSValue baseValue = LLINT_OP_C(2).jsValue();
+ JSObject* baseObject = baseValue.toObject(exec);
+
+ JSValue subscript = LLINT_OP_C(3).jsValue();
+
+ bool couldDelete;
+
+ uint32_t i;
+ if (subscript.getUInt32(i))
+ couldDelete = baseObject->methodTable()->deletePropertyByIndex(baseObject, exec, i);
+ else {
+ LLINT_CHECK_EXCEPTION();
+ Identifier property(exec, subscript.toString(exec)->value(exec));
+ LLINT_CHECK_EXCEPTION();
+ couldDelete = baseObject->methodTable()->deleteProperty(baseObject, exec, property);
+ }
+
+ if (!couldDelete && exec->codeBlock()->isStrictMode())
+ LLINT_THROW(createTypeError(exec, "Unable to delete property."));
+
+ LLINT_RETURN(jsBoolean(couldDelete));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_put_by_index)
+{
+ LLINT_BEGIN();
+ LLINT_OP_C(1).jsValue().put(exec, pc[2].u.operand, LLINT_OP_C(3).jsValue());
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_put_getter_setter)
+{
+ LLINT_BEGIN();
+ ASSERT(LLINT_OP(1).jsValue().isObject());
+ JSObject* baseObj = asObject(LLINT_OP(1).jsValue());
+
+ GetterSetter* accessor = GetterSetter::create(exec);
+ LLINT_CHECK_EXCEPTION();
+
+ JSValue getter = LLINT_OP(3).jsValue();
+ JSValue setter = LLINT_OP(4).jsValue();
+ ASSERT(getter.isObject() || getter.isUndefined());
+ ASSERT(setter.isObject() || setter.isUndefined());
+ ASSERT(getter.isObject() || setter.isObject());
+
+ if (!getter.isUndefined())
+ accessor->setGetter(globalData, asObject(getter));
+ if (!setter.isUndefined())
+ accessor->setSetter(globalData, asObject(setter));
+ baseObj->putDirectAccessor(
+ globalData,
+ exec->codeBlock()->identifier(pc[2].u.operand),
+ accessor, Accessor);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jmp_scopes)
+{
+ LLINT_BEGIN();
+ unsigned count = pc[1].u.operand;
+ ScopeChainNode* tmp = exec->scopeChain();
+ while (count--)
+ tmp = tmp->pop();
+ exec->setScopeChain(tmp);
+ pc += pc[2].u.operand;
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jtrue)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jtrue, LLINT_OP_C(1).jsValue().toBoolean(exec));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jfalse)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jfalse, !LLINT_OP_C(1).jsValue().toBoolean(exec));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jless)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jless, jsLess<true>(exec, LLINT_OP_C(1).jsValue(), LLINT_OP_C(2).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jnless)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jnless, !jsLess<true>(exec, LLINT_OP_C(1).jsValue(), LLINT_OP_C(2).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jgreater)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jgreater, jsLess<false>(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(1).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jngreater)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jngreater, !jsLess<false>(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(1).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jlesseq)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jlesseq, jsLessEq<true>(exec, LLINT_OP_C(1).jsValue(), LLINT_OP_C(2).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jnlesseq)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jnlesseq, !jsLessEq<true>(exec, LLINT_OP_C(1).jsValue(), LLINT_OP_C(2).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jgreatereq)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jgreatereq, jsLessEq<false>(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(1).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_jngreatereq)
+{
+ LLINT_BEGIN();
+ LLINT_BRANCH(op_jngreatereq, !jsLessEq<false>(exec, LLINT_OP_C(2).jsValue(), LLINT_OP_C(1).jsValue()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_switch_imm)
+{
+ LLINT_BEGIN();
+ JSValue scrutinee = LLINT_OP_C(3).jsValue();
+ ASSERT(scrutinee.isDouble());
+ double value = scrutinee.asDouble();
+ int32_t intValue = static_cast<int32_t>(value);
+ int defaultOffset = pc[2].u.operand;
+ if (value == intValue) {
+ CodeBlock* codeBlock = exec->codeBlock();
+ pc += codeBlock->immediateSwitchJumpTable(pc[1].u.operand).offsetForValue(intValue, defaultOffset);
+ } else
+ pc += defaultOffset;
+ LLINT_END();
+}
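+
+// The ASSERT above relies on the interpreter handling int32 scrutinees inline
+// and sending other non-double values straight to the default target, so only
+// doubles reach this slow path; here we re-check that the double is really an
+// integer before indexing the jump table.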
+
+LLINT_SLOW_PATH_DECL(slow_path_switch_string)
+{
+ LLINT_BEGIN();
+ JSValue scrutinee = LLINT_OP_C(3).jsValue();
+ int defaultOffset = pc[2].u.operand;
+ if (!scrutinee.isString())
+ pc += defaultOffset;
+ else {
+ CodeBlock* codeBlock = exec->codeBlock();
+ pc += codeBlock->stringSwitchJumpTable(pc[1].u.operand).offsetForValue(asString(scrutinee)->value(exec).impl(), defaultOffset);
+ }
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_new_func)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ ASSERT(codeBlock->codeType() != FunctionCode
+ || !codeBlock->needsFullScopeChain()
+ || exec->uncheckedR(codeBlock->activationRegister()).jsValue());
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Creating function!\n");
+#endif
+ LLINT_RETURN(codeBlock->functionDecl(pc[2].u.operand)->make(exec, exec->scopeChain()));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_new_func_exp)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ FunctionExecutable* function = codeBlock->functionExpr(pc[2].u.operand);
+ JSFunction* func = function->make(exec, exec->scopeChain());
+
+ if (!function->name().isNull()) {
+ JSStaticScopeObject* functionScopeObject = JSStaticScopeObject::create(exec, function->name(), func, ReadOnly | DontDelete);
+ func->setScope(globalData, func->scope()->push(functionScopeObject));
+ }
+
+ LLINT_RETURN(func);
+}
+
+static SlowPathReturnType handleHostCall(ExecState* execCallee, Instruction* pc, JSValue callee, CodeSpecializationKind kind)
+{
+ ExecState* exec = execCallee->callerFrame();
+ JSGlobalData& globalData = exec->globalData();
+
+ execCallee->setScopeChain(exec->scopeChain());
+ execCallee->setCodeBlock(0);
+ execCallee->clearReturnPC();
+
+ if (kind == CodeForCall) {
+ CallData callData;
+ CallType callType = getCallData(callee, callData);
+
+ ASSERT(callType != CallTypeJS);
+
+ if (callType == CallTypeHost) {
+ globalData.hostCallReturnValue = JSValue::decode(callData.native.function(execCallee));
+
+ LLINT_CALL_RETURN(execCallee, pc, reinterpret_cast<void*>(getHostCallReturnValue));
+ }
+
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Call callee is not a function: %s\n", callee.description());
+#endif
+
+ ASSERT(callType == CallTypeNone);
+ LLINT_CALL_THROW(exec, pc, createNotAFunctionError(exec, callee));
+ }
+
+ ASSERT(kind == CodeForConstruct);
+
+ ConstructData constructData;
+ ConstructType constructType = getConstructData(callee, constructData);
+
+ ASSERT(constructType != ConstructTypeJS);
+
+ if (constructType == ConstructTypeHost) {
+ globalData.hostCallReturnValue = JSValue::decode(constructData.native.function(execCallee));
+
+ LLINT_CALL_RETURN(execCallee, pc, reinterpret_cast<void*>(getHostCallReturnValue));
+ }
+
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Constructor callee is not a function: %s\n", callee.description());
+#endif
+
+ ASSERT(constructType == ConstructTypeNone);
+ LLINT_CALL_THROW(exec, pc, createNotAConstructorError(exec, callee));
+}
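+
+// In both arms, the native result is parked in globalData.hostCallReturnValue
+// and we "return to" getHostCallReturnValue, a small trampoline that loads
+// the boxed value back out. That way the same LLINT_CALL_RETURN plumbing
+// serves native callees and JS callees alike.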
+
+inline SlowPathReturnType setUpCall(ExecState* execCallee, Instruction* pc, CodeSpecializationKind kind, JSValue calleeAsValue, LLIntCallLinkInfo* callLinkInfo = 0)
+{
+#if LLINT_SLOW_PATH_TRACING
+ dataLog("Performing call with recorded PC = %p\n", execCallee->callerFrame()->currentVPC());
+#endif
+
+ JSCell* calleeAsFunctionCell = getJSFunction(calleeAsValue);
+ if (!calleeAsFunctionCell)
+ return handleHostCall(execCallee, pc, calleeAsValue, kind);
+
+ JSFunction* callee = asFunction(calleeAsFunctionCell);
+ ScopeChainNode* scope = callee->scopeUnchecked();
+ JSGlobalData& globalData = *scope->globalData;
+ execCallee->setScopeChain(scope);
+ ExecutableBase* executable = callee->executable();
+
+ MacroAssemblerCodePtr codePtr;
+ CodeBlock* codeBlock = 0;
+ if (executable->isHostFunction())
+ codePtr = executable->generatedJITCodeFor(kind).addressForCall();
+ else {
+ FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
+ JSObject* error = functionExecutable->compileFor(execCallee, callee->scope(), kind);
+ if (error)
+ LLINT_CALL_THROW(execCallee->callerFrame(), pc, error);
+ codeBlock = &functionExecutable->generatedBytecodeFor(kind);
+ ASSERT(codeBlock);
+ if (execCallee->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()))
+ codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(kind);
+ else
+ codePtr = functionExecutable->generatedJITCodeFor(kind).addressForCall();
+ }
+
+ if (callLinkInfo) {
+ if (callLinkInfo->isOnList())
+ callLinkInfo->remove();
+ ExecState* execCaller = execCallee->callerFrame();
+ callLinkInfo->callee.set(globalData, execCaller->codeBlock()->ownerExecutable(), callee);
+ callLinkInfo->lastSeenCallee.set(globalData, execCaller->codeBlock()->ownerExecutable(), callee);
+ callLinkInfo->machineCodeTarget = codePtr;
+ if (codeBlock)
+ codeBlock->linkIncomingCall(callLinkInfo);
+ }
+
+ LLINT_CALL_RETURN(execCallee, pc, codePtr.executableAddress());
+}
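+
+// When a callLinkInfo is supplied, this also links the call site: caching the
+// callee and machine code target lets later executions of the call
+// instruction jump straight to codePtr, while linkIncomingCall() registers
+// the site with the callee's CodeBlock so the link can be severed if that
+// code is later thrown away.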
+
+inline SlowPathReturnType genericCall(ExecState* exec, Instruction* pc, CodeSpecializationKind kind)
+{
+ // This needs to:
+ // - Set up a call frame.
+ // - Figure out what to call and compile it if necessary.
+ // - If possible, link the call's inline cache.
+ // - Return a tuple of machine code address to call and the new call frame.
+
+ JSValue calleeAsValue = LLINT_OP_C(1).jsValue();
+
+ ExecState* execCallee = exec + pc[3].u.operand;
+
+ execCallee->setArgumentCountIncludingThis(pc[2].u.operand);
+ execCallee->uncheckedR(RegisterFile::Callee) = calleeAsValue;
+ execCallee->setCallerFrame(exec);
+
+ ASSERT(pc[4].u.callLinkInfo);
+ return setUpCall(execCallee, pc, kind, calleeAsValue, pc[4].u.callLinkInfo);
+}
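+
+// The callee frame is carved out of the same register file as the caller's:
+// execCallee is just exec plus a register offset baked into the bytecode
+// (pc[3]). Only the argument count, callee, and caller-frame header slots are
+// filled in here; setUpCall() decides what actually gets run.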
+
+LLINT_SLOW_PATH_DECL(slow_path_call)
+{
+ LLINT_BEGIN_NO_SET_PC();
+ return genericCall(exec, pc, CodeForCall);
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_construct)
+{
+ LLINT_BEGIN_NO_SET_PC();
+ return genericCall(exec, pc, CodeForConstruct);
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_call_varargs)
+{
+ LLINT_BEGIN();
+ // This needs to:
+ // - Set up a call frame while respecting the variable arguments.
+ // - Figure out what to call and compile it if necessary.
+ // - Return a tuple of machine code address to call and the new call frame.
+
+ JSValue calleeAsValue = LLINT_OP_C(1).jsValue();
+
+ ExecState* execCallee = loadVarargs(
+ exec, &globalData.interpreter->registerFile(),
+ LLINT_OP_C(2).jsValue(), LLINT_OP_C(3).jsValue(), pc[4].u.operand);
+ LLINT_CALL_CHECK_EXCEPTION(exec, pc);
+
+ execCallee->uncheckedR(RegisterFile::Callee) = calleeAsValue;
+ execCallee->setCallerFrame(exec);
+ exec->uncheckedR(RegisterFile::ArgumentCount).tag() = bitwise_cast<int32_t>(pc + OPCODE_LENGTH(op_call_varargs));
+
+ return setUpCall(execCallee, pc, CodeForCall, calleeAsValue);
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_call_eval)
+{
+ LLINT_BEGIN_NO_SET_PC();
+ JSValue calleeAsValue = LLINT_OP(1).jsValue();
+
+ ExecState* execCallee = exec + pc[3].u.operand;
+
+ execCallee->setArgumentCountIncludingThis(pc[2].u.operand);
+ execCallee->setCallerFrame(exec);
+ execCallee->uncheckedR(RegisterFile::Callee) = calleeAsValue;
+ execCallee->setScopeChain(exec->scopeChain());
+ execCallee->setReturnPC(bitwise_cast<Instruction*>(&llint_generic_return_point));
+ execCallee->setCodeBlock(0);
+ exec->uncheckedR(RegisterFile::ArgumentCount).tag() = bitwise_cast<int32_t>(pc + OPCODE_LENGTH(op_call_eval));
+
+ if (!isHostFunction(calleeAsValue, globalFuncEval))
+ return setUpCall(execCallee, pc, CodeForCall, calleeAsValue);
+
+ globalData.hostCallReturnValue = eval(execCallee);
+ LLINT_CALL_RETURN(execCallee, pc, reinterpret_cast<void*>(getHostCallReturnValue));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_tear_off_activation)
+{
+ LLINT_BEGIN();
+ ASSERT(exec->codeBlock()->needsFullScopeChain());
+ JSValue activationValue = LLINT_OP(1).jsValue();
+ if (!activationValue) {
+ if (JSValue v = exec->uncheckedR(unmodifiedArgumentsRegister(pc[2].u.operand)).jsValue()) {
+ if (!exec->codeBlock()->isStrictMode())
+ asArguments(v)->tearOff(exec);
+ }
+ LLINT_END();
+ }
+ JSActivation* activation = asActivation(activationValue);
+ activation->tearOff(globalData);
+ if (JSValue v = exec->uncheckedR(unmodifiedArgumentsRegister(pc[2].u.operand)).jsValue())
+ asArguments(v)->didTearOffActivation(globalData, activation);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_tear_off_arguments)
+{
+ LLINT_BEGIN();
+ ASSERT(exec->codeBlock()->usesArguments() && !exec->codeBlock()->needsFullScopeChain());
+ asArguments(exec->uncheckedR(unmodifiedArgumentsRegister(pc[1].u.operand)).jsValue())->tearOff(exec);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_strcat)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(jsString(exec, &LLINT_OP(2), pc[3].u.operand));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_to_primitive)
+{
+ LLINT_BEGIN();
+ LLINT_RETURN(LLINT_OP_C(2).jsValue().toPrimitive(exec));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_get_pnames)
+{
+ LLINT_BEGIN();
+ JSValue v = LLINT_OP(2).jsValue();
+ if (v.isUndefinedOrNull()) {
+ pc += pc[5].u.operand;
+ LLINT_END();
+ }
+
+ JSObject* o = v.toObject(exec);
+ Structure* structure = o->structure();
+ JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
+ if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(exec))
+ jsPropertyNameIterator = JSPropertyNameIterator::create(exec, o);
+
+ LLINT_OP(1) = JSValue(jsPropertyNameIterator);
+ LLINT_OP(2) = JSValue(o);
+ LLINT_OP(3) = Register::withInt(0);
+ LLINT_OP(4) = Register::withInt(jsPropertyNameIterator->size());
+
+ pc += OPCODE_LENGTH(op_get_pnames);
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_next_pname)
+{
+ LLINT_BEGIN();
+ JSObject* base = asObject(LLINT_OP(2).jsValue());
+ JSString* property = asString(LLINT_OP(1).jsValue());
+ if (base->hasProperty(exec, Identifier(exec, property->value(exec)))) {
+ // Go to target.
+ pc += pc[6].u.operand;
+ } // Else, don't change the PC, so the interpreter will reloop.
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_push_scope)
+{
+ LLINT_BEGIN();
+ JSValue v = LLINT_OP(1).jsValue();
+ JSObject* o = v.toObject(exec);
+ LLINT_CHECK_EXCEPTION();
+
+ LLINT_OP(1) = o;
+ exec->setScopeChain(exec->scopeChain()->push(o));
+
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_pop_scope)
+{
+ LLINT_BEGIN();
+ exec->setScopeChain(exec->scopeChain()->pop());
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_push_new_scope)
+{
+ LLINT_BEGIN();
+ CodeBlock* codeBlock = exec->codeBlock();
+ JSObject* scope = JSStaticScopeObject::create(exec, codeBlock->identifier(pc[2].u.operand), LLINT_OP(3).jsValue(), DontDelete);
+ exec->setScopeChain(exec->scopeChain()->push(scope));
+ LLINT_RETURN(scope);
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_throw)
+{
+ LLINT_BEGIN();
+ LLINT_THROW(LLINT_OP_C(1).jsValue());
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_throw_reference_error)
+{
+ LLINT_BEGIN();
+ LLINT_THROW(createReferenceError(exec, LLINT_OP_C(1).jsValue().toString(exec)->value(exec)));
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_debug)
+{
+ LLINT_BEGIN();
+ int debugHookID = pc[1].u.operand;
+ int firstLine = pc[2].u.operand;
+ int lastLine = pc[3].u.operand;
+
+ globalData.interpreter->debug(exec, static_cast<DebugHookID>(debugHookID), firstLine, lastLine);
+
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_profile_will_call)
+{
+ LLINT_BEGIN();
+ (*Profiler::enabledProfilerReference())->willExecute(exec, LLINT_OP(1).jsValue());
+ LLINT_END();
+}
+
+LLINT_SLOW_PATH_DECL(slow_path_profile_did_call)
+{
+ LLINT_BEGIN();
+ (*Profiler::enabledProfilerReference())->didExecute(exec, LLINT_OP(1).jsValue());
+ LLINT_END();
+}
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
diff --git a/Source/JavaScriptCore/llint/LLIntSlowPaths.h b/Source/JavaScriptCore/llint/LLIntSlowPaths.h
new file mode 100644
index 000000000..fe684d306
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntSlowPaths.h
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntSlowPaths_h
+#define LLIntSlowPaths_h
+
+#include <wtf/Platform.h>
+#include <wtf/StdLibExtras.h>
+
+#if ENABLE(LLINT)
+
+namespace JSC {
+
+class ExecState;
+struct Instruction;
+
+namespace LLInt {
+
+typedef int64_t SlowPathReturnType;
+
+extern "C" SlowPathReturnType llint_trace_operand(ExecState*, Instruction*, int fromWhere, int operand);
+extern "C" SlowPathReturnType llint_trace_value(ExecState*, Instruction*, int fromWhere, int operand);
+
+#define LLINT_SLOW_PATH_DECL(name) \
+ extern "C" SlowPathReturnType llint_##name(ExecState* exec, Instruction* pc)
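+
+// For example, a use like LLINT_SLOW_PATH_DECL(slow_path_add); expands to
+//
+//     extern "C" SlowPathReturnType llint_slow_path_add(ExecState* exec, Instruction* pc);
+//
+// The 64-bit return value packs the updated PC and call frame pointer; on a
+// 32-bit build it comes back in a register pair, which callSlowPath in
+// LowLevelInterpreter.asm reads back as t0 (the new PC) and t1 (the new cfr).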
+
+LLINT_SLOW_PATH_DECL(trace_prologue);
+LLINT_SLOW_PATH_DECL(trace_prologue_function_for_call);
+LLINT_SLOW_PATH_DECL(trace_prologue_function_for_construct);
+LLINT_SLOW_PATH_DECL(trace_arityCheck_for_call);
+LLINT_SLOW_PATH_DECL(trace_arityCheck_for_construct);
+LLINT_SLOW_PATH_DECL(trace);
+LLINT_SLOW_PATH_DECL(special_trace);
+LLINT_SLOW_PATH_DECL(entry_osr);
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_call);
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_construct);
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_call_arityCheck);
+LLINT_SLOW_PATH_DECL(entry_osr_function_for_construct_arityCheck);
+LLINT_SLOW_PATH_DECL(loop_osr);
+LLINT_SLOW_PATH_DECL(replace);
+LLINT_SLOW_PATH_DECL(register_file_check);
+LLINT_SLOW_PATH_DECL(slow_path_call_arityCheck);
+LLINT_SLOW_PATH_DECL(slow_path_construct_arityCheck);
+LLINT_SLOW_PATH_DECL(slow_path_create_activation);
+LLINT_SLOW_PATH_DECL(slow_path_create_arguments);
+LLINT_SLOW_PATH_DECL(slow_path_create_this);
+LLINT_SLOW_PATH_DECL(slow_path_convert_this);
+LLINT_SLOW_PATH_DECL(slow_path_new_object);
+LLINT_SLOW_PATH_DECL(slow_path_new_array);
+LLINT_SLOW_PATH_DECL(slow_path_new_array_buffer);
+LLINT_SLOW_PATH_DECL(slow_path_new_regexp);
+LLINT_SLOW_PATH_DECL(slow_path_not);
+LLINT_SLOW_PATH_DECL(slow_path_eq);
+LLINT_SLOW_PATH_DECL(slow_path_neq);
+LLINT_SLOW_PATH_DECL(slow_path_stricteq);
+LLINT_SLOW_PATH_DECL(slow_path_nstricteq);
+LLINT_SLOW_PATH_DECL(slow_path_less);
+LLINT_SLOW_PATH_DECL(slow_path_lesseq);
+LLINT_SLOW_PATH_DECL(slow_path_greater);
+LLINT_SLOW_PATH_DECL(slow_path_greatereq);
+LLINT_SLOW_PATH_DECL(slow_path_pre_inc);
+LLINT_SLOW_PATH_DECL(slow_path_pre_dec);
+LLINT_SLOW_PATH_DECL(slow_path_post_inc);
+LLINT_SLOW_PATH_DECL(slow_path_post_dec);
+LLINT_SLOW_PATH_DECL(slow_path_to_jsnumber);
+LLINT_SLOW_PATH_DECL(slow_path_negate);
+LLINT_SLOW_PATH_DECL(slow_path_add);
+LLINT_SLOW_PATH_DECL(slow_path_mul);
+LLINT_SLOW_PATH_DECL(slow_path_sub);
+LLINT_SLOW_PATH_DECL(slow_path_div);
+LLINT_SLOW_PATH_DECL(slow_path_mod);
+LLINT_SLOW_PATH_DECL(slow_path_lshift);
+LLINT_SLOW_PATH_DECL(slow_path_rshift);
+LLINT_SLOW_PATH_DECL(slow_path_urshift);
+LLINT_SLOW_PATH_DECL(slow_path_bitand);
+LLINT_SLOW_PATH_DECL(slow_path_bitor);
+LLINT_SLOW_PATH_DECL(slow_path_bitxor);
+LLINT_SLOW_PATH_DECL(slow_path_bitnot);
+LLINT_SLOW_PATH_DECL(slow_path_check_has_instance);
+LLINT_SLOW_PATH_DECL(slow_path_instanceof);
+LLINT_SLOW_PATH_DECL(slow_path_typeof);
+LLINT_SLOW_PATH_DECL(slow_path_is_undefined);
+LLINT_SLOW_PATH_DECL(slow_path_is_boolean);
+LLINT_SLOW_PATH_DECL(slow_path_is_number);
+LLINT_SLOW_PATH_DECL(slow_path_is_string);
+LLINT_SLOW_PATH_DECL(slow_path_is_object);
+LLINT_SLOW_PATH_DECL(slow_path_is_function);
+LLINT_SLOW_PATH_DECL(slow_path_in);
+LLINT_SLOW_PATH_DECL(slow_path_resolve);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_skip);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_global);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_global_dynamic);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_for_resolve_global_dynamic);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_base);
+LLINT_SLOW_PATH_DECL(slow_path_ensure_property_exists);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_with_base);
+LLINT_SLOW_PATH_DECL(slow_path_resolve_with_this);
+LLINT_SLOW_PATH_DECL(slow_path_get_by_id);
+LLINT_SLOW_PATH_DECL(slow_path_get_arguments_length);
+LLINT_SLOW_PATH_DECL(slow_path_put_by_id);
+LLINT_SLOW_PATH_DECL(slow_path_del_by_id);
+LLINT_SLOW_PATH_DECL(slow_path_get_by_val);
+LLINT_SLOW_PATH_DECL(slow_path_get_argument_by_val);
+LLINT_SLOW_PATH_DECL(slow_path_get_by_pname);
+LLINT_SLOW_PATH_DECL(slow_path_put_by_val);
+LLINT_SLOW_PATH_DECL(slow_path_del_by_val);
+LLINT_SLOW_PATH_DECL(slow_path_put_by_index);
+LLINT_SLOW_PATH_DECL(slow_path_put_getter_setter);
+LLINT_SLOW_PATH_DECL(slow_path_jmp_scopes);
+LLINT_SLOW_PATH_DECL(slow_path_jtrue);
+LLINT_SLOW_PATH_DECL(slow_path_jfalse);
+LLINT_SLOW_PATH_DECL(slow_path_jless);
+LLINT_SLOW_PATH_DECL(slow_path_jnless);
+LLINT_SLOW_PATH_DECL(slow_path_jgreater);
+LLINT_SLOW_PATH_DECL(slow_path_jngreater);
+LLINT_SLOW_PATH_DECL(slow_path_jlesseq);
+LLINT_SLOW_PATH_DECL(slow_path_jnlesseq);
+LLINT_SLOW_PATH_DECL(slow_path_jgreatereq);
+LLINT_SLOW_PATH_DECL(slow_path_jngreatereq);
+LLINT_SLOW_PATH_DECL(slow_path_switch_imm);
+LLINT_SLOW_PATH_DECL(slow_path_switch_char);
+LLINT_SLOW_PATH_DECL(slow_path_switch_string);
+LLINT_SLOW_PATH_DECL(slow_path_new_func);
+LLINT_SLOW_PATH_DECL(slow_path_new_func_exp);
+LLINT_SLOW_PATH_DECL(slow_path_call);
+LLINT_SLOW_PATH_DECL(slow_path_construct);
+LLINT_SLOW_PATH_DECL(slow_path_call_varargs);
+LLINT_SLOW_PATH_DECL(slow_path_call_eval);
+LLINT_SLOW_PATH_DECL(slow_path_tear_off_activation);
+LLINT_SLOW_PATH_DECL(slow_path_tear_off_arguments);
+LLINT_SLOW_PATH_DECL(slow_path_strcat);
+LLINT_SLOW_PATH_DECL(slow_path_to_primitive);
+LLINT_SLOW_PATH_DECL(slow_path_get_pnames);
+LLINT_SLOW_PATH_DECL(slow_path_next_pname);
+LLINT_SLOW_PATH_DECL(slow_path_push_scope);
+LLINT_SLOW_PATH_DECL(slow_path_pop_scope);
+LLINT_SLOW_PATH_DECL(slow_path_push_new_scope);
+LLINT_SLOW_PATH_DECL(slow_path_throw);
+LLINT_SLOW_PATH_DECL(slow_path_throw_reference_error);
+LLINT_SLOW_PATH_DECL(slow_path_debug);
+LLINT_SLOW_PATH_DECL(slow_path_profile_will_call);
+LLINT_SLOW_PATH_DECL(slow_path_profile_did_call);
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
+
+#endif // LLIntSlowPaths_h
+
diff --git a/Source/JavaScriptCore/llint/LLIntThunks.cpp b/Source/JavaScriptCore/llint/LLIntThunks.cpp
new file mode 100644
index 000000000..ddb0c46c2
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntThunks.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "LLIntThunks.h"
+
+#if ENABLE(LLINT)
+
+#include "JSInterfaceJIT.h"
+#include "LinkBuffer.h"
+#include "LowLevelInterpreter.h"
+
+namespace JSC { namespace LLInt {
+
+static MacroAssemblerCodeRef generateThunkWithJumpTo(JSGlobalData* globalData, void (*target)())
+{
+ JSInterfaceJIT jit;
+
+ // FIXME: there's probably a better way to do it on X86, but I'm not sure I care.
+ jit.move(JSInterfaceJIT::TrustedImmPtr(bitwise_cast<void*>(target)), JSInterfaceJIT::regT0);
+ jit.jump(JSInterfaceJIT::regT0);
+
+ LinkBuffer patchBuffer(*globalData, &jit, GLOBAL_THUNK_ID);
+ return patchBuffer.finalizeCode();
+}
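+
+// Each generator below hands back a MacroAssemblerCodeRef whose entire body
+// is the indirect jump above, aimed at the matching LLInt prologue label in
+// LowLevelInterpreter.asm. That gives code which expects a machine code entry
+// point (the JIT's call linking, for instance) a way into the interpreter.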
+
+MacroAssemblerCodeRef functionForCallEntryThunkGenerator(JSGlobalData* globalData)
+{
+ return generateThunkWithJumpTo(globalData, llint_function_for_call_prologue);
+}
+
+MacroAssemblerCodeRef functionForConstructEntryThunkGenerator(JSGlobalData* globalData)
+{
+ return generateThunkWithJumpTo(globalData, llint_function_for_construct_prologue);
+}
+
+MacroAssemblerCodeRef functionForCallArityCheckThunkGenerator(JSGlobalData* globalData)
+{
+ return generateThunkWithJumpTo(globalData, llint_function_for_call_arity_check);
+}
+
+MacroAssemblerCodeRef functionForConstructArityCheckThunkGenerator(JSGlobalData* globalData)
+{
+ return generateThunkWithJumpTo(globalData, llint_function_for_construct_arity_check);
+}
+
+MacroAssemblerCodeRef evalEntryThunkGenerator(JSGlobalData* globalData)
+{
+ return generateThunkWithJumpTo(globalData, llint_eval_prologue);
+}
+
+MacroAssemblerCodeRef programEntryThunkGenerator(JSGlobalData* globalData)
+{
+ return generateThunkWithJumpTo(globalData, llint_program_prologue);
+}
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
diff --git a/Source/JavaScriptCore/llint/LLIntThunks.h b/Source/JavaScriptCore/llint/LLIntThunks.h
new file mode 100644
index 000000000..ee119e0b9
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LLIntThunks.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LLIntThunks_h
+#define LLIntThunks_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(LLINT)
+
+#include "MacroAssemblerCodeRef.h"
+
+namespace JSC {
+
+class JSGlobalData;
+
+namespace LLInt {
+
+MacroAssemblerCodeRef functionForCallEntryThunkGenerator(JSGlobalData*);
+MacroAssemblerCodeRef functionForConstructEntryThunkGenerator(JSGlobalData*);
+MacroAssemblerCodeRef functionForCallArityCheckThunkGenerator(JSGlobalData*);
+MacroAssemblerCodeRef functionForConstructArityCheckThunkGenerator(JSGlobalData*);
+MacroAssemblerCodeRef evalEntryThunkGenerator(JSGlobalData*);
+MacroAssemblerCodeRef programEntryThunkGenerator(JSGlobalData*);
+
+} } // namespace JSC::LLInt
+
+#endif // ENABLE(LLINT)
+
+#endif // LLIntThunks_h
diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter.asm b/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
new file mode 100644
index 000000000..a9f83f680
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LowLevelInterpreter.asm
@@ -0,0 +1,2390 @@
+# Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+# THE POSSIBILITY OF SUCH DAMAGE.
+
+
+# Crash course on the language that this is written in (which I just call
+# "assembly" even though it's more than that):
+#
+# - Mostly gas-style operand ordering. The last operand tends to be the
+# destination. So "a := b" is written as "mov b, a". But unlike gas,
+# comparisons are in-order, so "if (a < b)" is written as
+# "bilt a, b, ...".
+#
+# - "b" = byte, "h" = 16-bit word, "i" = 32-bit word, "p" = pointer.
+# Currently this is just 32-bit so "i" and "p" are interchangeable
+# except when an op supports one but not the other.
+#
+# - In general, valid operands for macro invocations and instructions are
+# registers (eg "t0"), addresses (eg "4[t0]"), base-index addresses
+# (eg "7[t0, t1, 2]"), absolute addresses (eg "0xa0000000[]"), or labels
+# (eg "_foo" or ".foo"). Macro invocations can also take anonymous
+# macros as operands. Instructions cannot take anonymous macros.
+#
+# - Labels must have names that begin with either "_" or ".". A "." label
+# is local and gets renamed before code gen to minimize namespace
+# pollution. A "_" label is an extern symbol (i.e. ".globl"). The "_"
+# may or may not be removed during code gen depending on whether the asm
+# conventions for C name mangling on the target platform mandate a "_"
+# prefix.
+#
+# - A "macro" is a lambda expression, which may be either anonymous or
+#   named, with some caveats: a macro can take zero or more arguments,
+#   which may be macros or any valid operands, but it can only return
+#   code. That said, you can do Turing-complete things via continuation
+#   passing style: "macro foo (a, b) b(a) end foo(foo, foo)". Actually,
+#   don't do that, since you'll just crash the assembler.
+#
+# - An "if" is a conditional on settings. Any identifier supplied in the
+# predicate of an "if" is assumed to be a #define that is available
+# during code gen. So you can't use "if" for computation in a macro, but
+# you can use it to select different pieces of code for different
+# platforms.
+#
+# - Arguments to macros follow lexical scoping rather than dynamic scoping.
+#   Consts also follow lexical scoping and may override (hide) arguments
+# or other consts. All variables (arguments and constants) can be bound
+# to operands. Additionally, arguments (but not constants) can be bound
+# to macros.
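+#
+# To make the flavor concrete, a tiny illustrative example (not code from
+# this file):
+#
+#     macro double(reg)
+#         addi reg, reg        # reg := reg + reg
+#     end
+#
+#     _example:
+#         move 2, t0           # t0 := 2
+#         double(t0)           # t0 := 4
+#         bieq t0, 4, .done    # in-order compare: if (t0 == 4) goto .done
+#         crash()
+#     .done:
+#         ret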
+
+
+# Below we have a bunch of constant declarations. Each constant must have
+# a corresponding ASSERT() in LLIntData.cpp.
+
+# These declarations must match interpreter/RegisterFile.h.
+const CallFrameHeaderSize = 48
+const ArgumentCount = -48
+const CallerFrame = -40
+const Callee = -32
+const ScopeChain = -24
+const ReturnPC = -16
+const CodeBlock = -8
+
+const ThisArgumentOffset = -CallFrameHeaderSize - 8
+
+# Declare some aliases for the registers we will use.
+const PC = t4
+
+# Offsets needed for reasoning about value representation.
+if BIG_ENDIAN
+ const TagOffset = 0
+ const PayloadOffset = 4
+else
+ const TagOffset = 4
+ const PayloadOffset = 0
+end
+
+# Value representation constants.
+const Int32Tag = -1
+const BooleanTag = -2
+const NullTag = -3
+const UndefinedTag = -4
+const CellTag = -5
+const EmptyValueTag = -6
+const DeletedValueTag = -7
+const LowestTag = DeletedValueTag
+
+# Type constants.
+const StringType = 5
+const ObjectType = 13
+
+# Type flags constants.
+const MasqueradesAsUndefined = 1
+const ImplementsHasInstance = 2
+const ImplementsDefaultHasInstance = 8
+
+# Heap allocation constants.
+const JSFinalObjectSizeClassIndex = 3
+
+# Bytecode operand constants.
+const FirstConstantRegisterIndex = 0x40000000
+
+# Code type constants.
+const GlobalCode = 0
+const EvalCode = 1
+const FunctionCode = 2
+
+# The interpreter steals the tag word of the argument count.
+const LLIntReturnPC = ArgumentCount + TagOffset
+
+# This must match wtf/Vector.h.
+const VectorSizeOffset = 0
+const VectorBufferOffset = 4
+
+# String flags.
+const HashFlags8BitBuffer = 64
+
+# Utilities
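+
+# crash() stores to 0xbbadbeef, the same cookie that WTF's CRASH() writes to,
+# so a dying interpreter is easy to spot in crash logs; it then calls address
+# zero to make sure execution really stops.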
+macro crash()
+ storei 0, 0xbbadbeef[]
+ move 0, t0
+ call t0
+end
+
+macro assert(assertion)
+ if ASSERT_ENABLED
+ assertion(.ok)
+ crash()
+ .ok:
+ end
+end
+
+macro preserveReturnAddressAfterCall(destinationRegister)
+ if ARMv7
+ move lr, destinationRegister
+ elsif X86
+ pop destinationRegister
+ else
+ error
+ end
+end
+
+macro restoreReturnAddressBeforeReturn(sourceRegister)
+ if ARMv7
+ move sourceRegister, lr
+ elsif X86
+ push sourceRegister
+ else
+ error
+ end
+end
+
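+# dispatch(advance) advances PC by `advance` instruction slots (4 bytes each
+# on this 32-bit build) and then jumps through the opcode pointer stored in
+# the first slot of the next instruction.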
+macro dispatch(advance)
+ addp advance * 4, PC
+ jmp [PC]
+end
+
+macro dispatchBranchWithOffset(pcOffset)
+ lshifti 2, pcOffset
+ addp pcOffset, PC
+ jmp [PC]
+end
+
+macro dispatchBranch(pcOffset)
+ loadi pcOffset, t0
+ dispatchBranchWithOffset(t0)
+end
+
+macro dispatchAfterCall()
+ loadi ArgumentCount + TagOffset[cfr], PC
+ jmp [PC]
+end
+
+macro cCall2(function, arg1, arg2)
+ if ARMv7
+ move arg1, t0
+ move arg2, t1
+ elsif X86
+ poke arg1, 0
+ poke arg2, 1
+ else
+ error
+ end
+ call function
+end
+
+# This barely works. arg3 and arg4 should probably be immediates.
+macro cCall4(function, arg1, arg2, arg3, arg4)
+ if ARMv7
+ move arg1, t0
+ move arg2, t1
+ move arg3, t2
+ move arg4, t3
+ elsif X86
+ poke arg1, 0
+ poke arg2, 1
+ poke arg3, 2
+ poke arg4, 3
+ else
+ error
+ end
+ call function
+end
+
+macro callSlowPath(slow_path)
+ cCall2(slow_path, cfr, PC)
+ move t0, PC
+ move t1, cfr
+end
+
+# Debugging operation if you'd like to print an operand in the instruction stream. fromWhere
+# should be an immediate integer - any integer you like; use it to identify the place you're
+# debugging from. operand should likewise be an immediate, and should identify the operand
+# in the instruction stream you'd like to print out.
+macro traceOperand(fromWhere, operand)
+ cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand)
+ move t0, PC
+ move t1, cfr
+end
+
+# Debugging operation if you'd like to print the value of an operand in the instruction
+# stream. Same as traceOperand(), but assumes that the operand is a register, and prints its
+# value.
+macro traceValue(fromWhere, operand)
+ cCall4(_llint_trace_value, cfr, PC, fromWhere, operand)
+ move t0, PC
+ move t1, cfr
+end
+
+macro traceExecution()
+ if EXECUTION_TRACING
+ callSlowPath(_llint_trace)
+ end
+end
+
+# Call a slow_path for call opcodes.
+macro callCallSlowPath(advance, slow_path, action)
+ addp advance * 4, PC, t0
+ storep t0, ArgumentCount + TagOffset[cfr]
+ cCall2(slow_path, cfr, PC)
+ move t1, cfr
+ action(t0)
+end
+
+macro slowPathForCall(advance, slow_path)
+ callCallSlowPath(
+ advance,
+ slow_path,
+ macro (callee)
+ call callee
+ dispatchAfterCall()
+ end)
+end
+
+macro checkSwitchToJIT(increment, action)
+ if JIT_ENABLED
+ loadp CodeBlock[cfr], t0
+ baddis increment, CodeBlock::m_llintExecuteCounter[t0], .continue
+ action()
+ .continue:
+ end
+end
+
+macro checkSwitchToJITForLoop()
+ checkSwitchToJIT(
+ 1,
+ macro ()
+ storei PC, ArgumentCount + TagOffset[cfr]
+ cCall2(_llint_loop_osr, cfr, PC)
+ move t1, cfr
+ btpz t0, .recover
+ jmp t0
+ .recover:
+ loadi ArgumentCount + TagOffset[cfr], PC
+ end)
+end
+
+macro checkSwitchToJITForEpilogue()
+ checkSwitchToJIT(
+ 10,
+ macro ()
+ callSlowPath(_llint_replace)
+ end)
+end
+
+macro assertNotConstant(index)
+ assert(macro (ok) bilt index, FirstConstantRegisterIndex, ok end)
+end
+
+# Index, tag, and payload must be different registers. Index is not
+# changed.
+macro loadConstantOrVariable(index, tag, payload)
+ bigteq index, FirstConstantRegisterIndex, .constant
+ loadi TagOffset[cfr, index, 8], tag
+ loadi PayloadOffset[cfr, index, 8], payload
+ jmp .done
+.constant:
+ loadp CodeBlock[cfr], payload
+ loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
+    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
+    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3, because
+    # FirstConstantRegisterIndex << 3 overflows and wraps to zero in 32-bit arithmetic.
+ loadp TagOffset[payload, index, 8], tag
+ loadp PayloadOffset[payload, index, 8], payload
+.done:
+end
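+
+# A typical use, from op_mov below: with the source operand's virtual
+# register index in t1, loadConstantOrVariable(t1, t2, t3) leaves the tag in
+# t2 and the payload in t3, fetching transparently from either the call frame
+# or the CodeBlock's constant pool.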
+
+# Index and payload may be the same register. Index may be clobbered.
+macro loadConstantOrVariable2Reg(index, tag, payload)
+ bigteq index, FirstConstantRegisterIndex, .constant
+ loadi TagOffset[cfr, index, 8], tag
+ loadi PayloadOffset[cfr, index, 8], payload
+ jmp .done
+.constant:
+ loadp CodeBlock[cfr], tag
+ loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
+    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
+    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3, because
+    # FirstConstantRegisterIndex << 3 overflows and wraps to zero in 32-bit arithmetic.
+ lshifti 3, index
+ addp index, tag
+ loadp PayloadOffset[tag], payload
+ loadp TagOffset[tag], tag
+.done:
+end
+
+macro loadConstantOrVariablePayloadTagCustom(index, tagCheck, payload)
+ bigteq index, FirstConstantRegisterIndex, .constant
+ tagCheck(TagOffset[cfr, index, 8])
+ loadi PayloadOffset[cfr, index, 8], payload
+ jmp .done
+.constant:
+ loadp CodeBlock[cfr], payload
+ loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
+    # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
+    # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3, because
+    # FirstConstantRegisterIndex << 3 overflows and wraps to zero in 32-bit arithmetic.
+ tagCheck(TagOffset[payload, index, 8])
+ loadp PayloadOffset[payload, index, 8], payload
+.done:
+end
+
+# Index and payload must be different registers. Index is not mutated. Use
+# this if you know what the tag of the variable should be. Doing the tag
+# test as part of loading the variable reduces register use, but may not
+# be faster than doing loadConstantOrVariable followed by a branch on the
+# tag.
+macro loadConstantOrVariablePayload(index, expectedTag, payload, slow)
+ loadConstantOrVariablePayloadTagCustom(
+ index,
+ macro (actualTag) bineq actualTag, expectedTag, slow end,
+ payload)
+end
+
+macro loadConstantOrVariablePayloadUnchecked(index, payload)
+ loadConstantOrVariablePayloadTagCustom(
+ index,
+ macro (actualTag) end,
+ payload)
+end
+
+macro writeBarrier(tag, payload)
+ # Nothing to do, since we don't have a generational or incremental collector.
+end
+
+macro valueProfile(tag, payload, profile)
+ if JIT_ENABLED
+ storei tag, ValueProfile::m_buckets + TagOffset[profile]
+ storei payload, ValueProfile::m_buckets + PayloadOffset[profile]
+ end
+end
+
+
+# Indicate the beginning of LLInt.
+_llint_begin:
+ crash()
+
+
+# Entrypoints into the interpreter
+
+macro functionForCallCodeBlockGetter(targetRegister)
+ loadp Callee[cfr], targetRegister
+ loadp JSFunction::m_executable[targetRegister], targetRegister
+ loadp FunctionExecutable::m_codeBlockForCall[targetRegister], targetRegister
+end
+
+macro functionForConstructCodeBlockGetter(targetRegister)
+ loadp Callee[cfr], targetRegister
+ loadp JSFunction::m_executable[targetRegister], targetRegister
+ loadp FunctionExecutable::m_codeBlockForConstruct[targetRegister], targetRegister
+end
+
+macro notFunctionCodeBlockGetter(targetRegister)
+ loadp CodeBlock[cfr], targetRegister
+end
+
+macro functionCodeBlockSetter(sourceRegister)
+ storep sourceRegister, CodeBlock[cfr]
+end
+
+macro notFunctionCodeBlockSetter(sourceRegister)
+ # Nothing to do!
+end
+
+# Do the bare minimum required to execute code. Sets up the PC and leaves the
+# CodeBlock* in t1. May also trigger prologue entry OSR.
+macro prologue(codeBlockGetter, codeBlockSetter, osrSlowPath, traceSlowPath)
+ preserveReturnAddressAfterCall(t2)
+
+ # Set up the call frame and check if we should OSR.
+ storep t2, ReturnPC[cfr]
+ if EXECUTION_TRACING
+ callSlowPath(traceSlowPath)
+ end
+ codeBlockGetter(t1)
+ if JIT_ENABLED
+ baddis 5, CodeBlock::m_llintExecuteCounter[t1], .continue
+ cCall2(osrSlowPath, cfr, PC)
+ move t1, cfr
+ btpz t0, .recover
+ loadp ReturnPC[cfr], t2
+ restoreReturnAddressBeforeReturn(t2)
+ jmp t0
+ .recover:
+ codeBlockGetter(t1)
+ .continue:
+ end
+ codeBlockSetter(t1)
+
+ # Set up the PC.
+ loadp CodeBlock::m_instructions[t1], t0
+ loadp CodeBlock::Instructions::m_instructions + VectorBufferOffset[t0], PC
+end
+
+# Expects that CodeBlock is in t1, which is what prologue() leaves behind.
+# Must call dispatch(0) after calling this.
+macro functionInitialization(profileArgSkip)
+ if JIT_ENABLED
+ # Profile the arguments. Unfortunately, we have no choice but to do this. This
+ # code is pretty horrendous because of the difference in ordering between
+ # arguments and value profiles, the desire to have a simple loop-down-to-zero
+ # loop, and the desire to use only three registers so as to preserve the PC and
+ # the code block. It is likely that this code should be rewritten in a more
+ # optimal way for architectures that have more than five registers available
+ # for arbitrary use in the interpreter.
+ loadi CodeBlock::m_numParameters[t1], t0
+ addi -profileArgSkip, t0 # Use addi because that's what has the peephole
+ assert(macro (ok) bigteq t0, 0, ok end)
+ btiz t0, .argumentProfileDone
+ loadp CodeBlock::m_argumentValueProfiles + VectorBufferOffset[t1], t3
+ muli sizeof ValueProfile, t0, t2 # Aaaaahhhh! Need strength reduction!
+ negi t0
+ lshifti 3, t0
+ addp t2, t3
+ .argumentProfileLoop:
+ loadi ThisArgumentOffset + TagOffset + 8 - profileArgSkip * 8[cfr, t0], t2
+ subp sizeof ValueProfile, t3
+ storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + TagOffset[t3]
+ loadi ThisArgumentOffset + PayloadOffset + 8 - profileArgSkip * 8[cfr, t0], t2
+ storei t2, profileArgSkip * sizeof ValueProfile + ValueProfile::m_buckets + PayloadOffset[t3]
+ baddinz 8, t0, .argumentProfileLoop
+ .argumentProfileDone:
+ end
+
+ # Check stack height.
+ loadi CodeBlock::m_numCalleeRegisters[t1], t0
+ loadp CodeBlock::m_globalData[t1], t2
+ loadp JSGlobalData::interpreter[t2], t2 # FIXME: Can get to the RegisterFile from the JITStackFrame
+ lshifti 3, t0
+ addp t0, cfr, t0
+ bpaeq Interpreter::m_registerFile + RegisterFile::m_end[t2], t0, .stackHeightOK
+
+ # Stack height check failed - need to call a slow_path.
+ callSlowPath(_llint_register_file_check)
+.stackHeightOK:
+end
+
+# Expects that CodeBlock is in t1, which is what prologue() leaves behind.
+macro functionArityCheck(doneLabel, slow_path)
+ loadi PayloadOffset + ArgumentCount[cfr], t0
+ biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
+ cCall2(slow_path, cfr, PC) # This slow_path has a simple protocol: t0 = 0 => no error, t0 != 0 => error
+ move t1, cfr
+ btiz t0, .continue
+ loadp JITStackFrame::globalData[sp], t1
+ loadp JSGlobalData::callFrameForThrow[t1], t0
+ jmp JSGlobalData::targetMachinePCForThrow[t1]
+.continue:
+    # Reload CodeBlock and PC, since the slow_path clobbered them.
+ loadp CodeBlock[cfr], t1
+ loadp CodeBlock::m_instructions[t1], t0
+ loadp CodeBlock::Instructions::m_instructions + VectorBufferOffset[t0], PC
+ jmp doneLabel
+end
+
+_llint_program_prologue:
+ prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue)
+ dispatch(0)
+
+
+_llint_eval_prologue:
+ prologue(notFunctionCodeBlockGetter, notFunctionCodeBlockSetter, _llint_entry_osr, _llint_trace_prologue)
+ dispatch(0)
+
+
+_llint_function_for_call_prologue:
+ prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call, _llint_trace_prologue_function_for_call)
+.functionForCallBegin:
+ functionInitialization(0)
+ dispatch(0)
+
+
+_llint_function_for_construct_prologue:
+ prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct, _llint_trace_prologue_function_for_construct)
+.functionForConstructBegin:
+ functionInitialization(1)
+ dispatch(0)
+
+
+_llint_function_for_call_arity_check:
+ prologue(functionForCallCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_call_arityCheck, _llint_trace_arityCheck_for_call)
+ functionArityCheck(.functionForCallBegin, _llint_slow_path_call_arityCheck)
+
+
+_llint_function_for_construct_arity_check:
+ prologue(functionForConstructCodeBlockGetter, functionCodeBlockSetter, _llint_entry_osr_function_for_construct_arityCheck, _llint_trace_arityCheck_for_construct)
+ functionArityCheck(.functionForConstructBegin, _llint_slow_path_construct_arityCheck)
+
+# Instruction implementations
+
+_llint_op_enter:
+ traceExecution()
+ loadp CodeBlock[cfr], t2
+ loadi CodeBlock::m_numVars[t2], t2
+ btiz t2, .opEnterDone
+ move UndefinedTag, t0
+ move 0, t1
+.opEnterLoop:
+ subi 1, t2
+ storei t0, TagOffset[cfr, t2, 8]
+ storei t1, PayloadOffset[cfr, t2, 8]
+ btinz t2, .opEnterLoop
+.opEnterDone:
+ dispatch(1)
+
+
+_llint_op_create_activation:
+ traceExecution()
+ loadi 4[PC], t0
+ bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateActivationDone
+ callSlowPath(_llint_slow_path_create_activation)
+.opCreateActivationDone:
+ dispatch(2)
+
+
+_llint_op_init_lazy_reg:
+ traceExecution()
+ loadi 4[PC], t0
+ storei EmptyValueTag, TagOffset[cfr, t0, 8]
+ storei 0, PayloadOffset[cfr, t0, 8]
+ dispatch(2)
+
+
+_llint_op_create_arguments:
+ traceExecution()
+ loadi 4[PC], t0
+ bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateArgumentsDone
+ callSlowPath(_llint_slow_path_create_arguments)
+.opCreateArgumentsDone:
+ dispatch(2)
+
+
+macro allocateBasicJSObject(sizeClassIndex, classInfoOffset, structure, result, scratch1, scratch2, slowCase)
+ if ALWAYS_ALLOCATE_SLOW
+ jmp slowCase
+ else
+ const offsetOfMySizeClass =
+ JSGlobalData::heap +
+ Heap::m_objectSpace +
+ MarkedSpace::m_normalSpace +
+ MarkedSpace::Subspace::preciseAllocators +
+ sizeClassIndex * sizeof MarkedAllocator
+
+ # FIXME: we can get the global data in one load from the stack.
+ loadp CodeBlock[cfr], scratch1
+ loadp CodeBlock::m_globalData[scratch1], scratch1
+
+ # Get the object from the free list.
+ loadp offsetOfMySizeClass + MarkedAllocator::m_firstFreeCell[scratch1], result
+ btpz result, slowCase
+
+ # Remove the object from the free list.
+ loadp [result], scratch2
+ storep scratch2, offsetOfMySizeClass + MarkedAllocator::m_firstFreeCell[scratch1]
+
+ # Initialize the object.
+ loadp classInfoOffset[scratch1], scratch2
+ storep scratch2, [result]
+ storep structure, JSCell::m_structure[result]
+ storep 0, JSObject::m_inheritorID[result]
+ addp sizeof JSObject, result, scratch1
+ storep scratch1, JSObject::m_propertyStorage[result]
+ end
+end
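+
+# The fast path above is a hand-rolled free-list pop: take the first free
+# cell of the requested size class and, since a free cell's first word links
+# to the next free cell, store that word back into m_firstFreeCell to
+# complete the pop. An empty list (or ALWAYS_ALLOCATE_SLOW) defers to the
+# C++ slow case.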
+
+_llint_op_create_this:
+ traceExecution()
+ loadi 8[PC], t0
+ assertNotConstant(t0)
+ bineq TagOffset[cfr, t0, 8], CellTag, .opCreateThisSlow
+ loadi PayloadOffset[cfr, t0, 8], t0
+ loadp JSCell::m_structure[t0], t1
+ bbb Structure::m_typeInfo + TypeInfo::m_type[t1], ObjectType, .opCreateThisSlow
+ loadp JSObject::m_inheritorID[t0], t2
+ btpz t2, .opCreateThisSlow
+ allocateBasicJSObject(JSFinalObjectSizeClassIndex, JSGlobalData::jsFinalObjectClassInfo, t2, t0, t1, t3, .opCreateThisSlow)
+ loadi 4[PC], t1
+ storei CellTag, TagOffset[cfr, t1, 8]
+ storei t0, PayloadOffset[cfr, t1, 8]
+ dispatch(3)
+
+.opCreateThisSlow:
+ callSlowPath(_llint_slow_path_create_this)
+ dispatch(3)
+
+
+_llint_op_get_callee:
+ traceExecution()
+ loadi 4[PC], t0
+ loadp PayloadOffset + Callee[cfr], t1
+ storei CellTag, TagOffset[cfr, t0, 8]
+ storei t1, PayloadOffset[cfr, t0, 8]
+ dispatch(2)
+
+
+_llint_op_convert_this:
+ traceExecution()
+ loadi 4[PC], t0
+ bineq TagOffset[cfr, t0, 8], CellTag, .opConvertThisSlow
+ loadi PayloadOffset[cfr, t0, 8], t0
+ loadp JSCell::m_structure[t0], t0
+ bbb Structure::m_typeInfo + TypeInfo::m_type[t0], ObjectType, .opConvertThisSlow
+ dispatch(2)
+
+.opConvertThisSlow:
+ callSlowPath(_llint_slow_path_convert_this)
+ dispatch(2)
+
+
+_llint_op_new_object:
+ traceExecution()
+ loadp CodeBlock[cfr], t0
+ loadp CodeBlock::m_globalObject[t0], t0
+ loadp JSGlobalObject::m_emptyObjectStructure[t0], t1
+ allocateBasicJSObject(JSFinalObjectSizeClassIndex, JSGlobalData::jsFinalObjectClassInfo, t1, t0, t2, t3, .opNewObjectSlow)
+ loadi 4[PC], t1
+ storei CellTag, TagOffset[cfr, t1, 8]
+ storei t0, PayloadOffset[cfr, t1, 8]
+ dispatch(2)
+
+.opNewObjectSlow:
+ callSlowPath(_llint_slow_path_new_object)
+ dispatch(2)
+
+
+_llint_op_new_array:
+ traceExecution()
+ callSlowPath(_llint_slow_path_new_array)
+ dispatch(4)
+
+
+_llint_op_new_array_buffer:
+ traceExecution()
+ callSlowPath(_llint_slow_path_new_array_buffer)
+ dispatch(4)
+
+
+_llint_op_new_regexp:
+ traceExecution()
+ callSlowPath(_llint_slow_path_new_regexp)
+ dispatch(3)
+
+
+_llint_op_mov:
+ traceExecution()
+ loadi 8[PC], t1
+ loadi 4[PC], t0
+ loadConstantOrVariable(t1, t2, t3)
+ storei t2, TagOffset[cfr, t0, 8]
+ storei t3, PayloadOffset[cfr, t0, 8]
+ dispatch(3)
+
+
+_llint_op_not:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t1
+ loadConstantOrVariable(t0, t2, t3)
+ bineq t2, BooleanTag, .opNotSlow
+ xori 1, t3
+ storei t2, TagOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t1, 8]
+ dispatch(3)
+
+.opNotSlow:
+ callSlowPath(_llint_slow_path_not)
+ dispatch(3)
+
+
+_llint_op_eq:
+ traceExecution()
+ loadi 12[PC], t2
+ loadi 8[PC], t0
+ loadConstantOrVariable(t2, t3, t1)
+ loadConstantOrVariable2Reg(t0, t2, t0)
+ bineq t2, t3, .opEqSlow
+ bieq t2, CellTag, .opEqSlow
+ bib t2, LowestTag, .opEqSlow
+ loadi 4[PC], t2
+ cieq t0, t1, t0
+ storei BooleanTag, TagOffset[cfr, t2, 8]
+ storei t0, PayloadOffset[cfr, t2, 8]
+ dispatch(4)
+
+.opEqSlow:
+ callSlowPath(_llint_slow_path_eq)
+ dispatch(4)
+
+
+_llint_op_eq_null:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t3
+ assertNotConstant(t0)
+ loadi TagOffset[cfr, t0, 8], t1
+ loadi PayloadOffset[cfr, t0, 8], t0
+ bineq t1, CellTag, .opEqNullImmediate
+ loadp JSCell::m_structure[t0], t1
+ tbnz Structure::m_typeInfo + TypeInfo::m_flags[t1], MasqueradesAsUndefined, t1
+ jmp .opEqNullNotImmediate
+.opEqNullImmediate:
+ cieq t1, NullTag, t2
+ cieq t1, UndefinedTag, t1
+ ori t2, t1
+.opEqNullNotImmediate:
+ storei BooleanTag, TagOffset[cfr, t3, 8]
+ storei t1, PayloadOffset[cfr, t3, 8]
+ dispatch(3)
+
+
+_llint_op_neq:
+ traceExecution()
+ loadi 12[PC], t2
+ loadi 8[PC], t0
+ loadConstantOrVariable(t2, t3, t1)
+ loadConstantOrVariable2Reg(t0, t2, t0)
+ bineq t2, t3, .opNeqSlow
+ bieq t2, CellTag, .opNeqSlow
+ bib t2, LowestTag, .opNeqSlow
+ loadi 4[PC], t2
+ cineq t0, t1, t0
+ storei BooleanTag, TagOffset[cfr, t2, 8]
+ storei t0, PayloadOffset[cfr, t2, 8]
+ dispatch(4)
+
+.opNeqSlow:
+ callSlowPath(_llint_slow_path_neq)
+ dispatch(4)
+
+
+_llint_op_neq_null:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t3
+ assertNotConstant(t0)
+ loadi TagOffset[cfr, t0, 8], t1
+ loadi PayloadOffset[cfr, t0, 8], t0
+ bineq t1, CellTag, .opNeqNullImmediate
+ loadp JSCell::m_structure[t0], t1
+ tbz Structure::m_typeInfo + TypeInfo::m_flags[t1], MasqueradesAsUndefined, t1
+ jmp .opNeqNullNotImmediate
+.opNeqNullImmediate:
+ cineq t1, NullTag, t2
+ cineq t1, UndefinedTag, t1
+ andi t2, t1
+.opNeqNullNotImmediate:
+ storei BooleanTag, TagOffset[cfr, t3, 8]
+ storei t1, PayloadOffset[cfr, t3, 8]
+ dispatch(3)
+
+
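+# Fast path for strict equality (a sketch of the invariants, as read from the
+# code below): the tags must match exactly; doubles and string-vs-string
+# comparisons (which require comparing characters) are punted to the slow
+# path, and everything else is decided by comparing the payload bits.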
+macro strictEq(equalityOperation, slow_path)
+ loadi 12[PC], t2
+ loadi 8[PC], t0
+ loadConstantOrVariable(t2, t3, t1)
+ loadConstantOrVariable2Reg(t0, t2, t0)
+ bineq t2, t3, .slow
+ bib t2, LowestTag, .slow
+ bineq t2, CellTag, .notString
+ loadp JSCell::m_structure[t0], t2
+ loadp JSCell::m_structure[t1], t3
+ bbneq Structure::m_typeInfo + TypeInfo::m_type[t2], StringType, .notString
+ bbeq Structure::m_typeInfo + TypeInfo::m_type[t3], StringType, .slow
+.notString:
+ loadi 4[PC], t2
+ equalityOperation(t0, t1, t0)
+ storei BooleanTag, TagOffset[cfr, t2, 8]
+ storei t0, PayloadOffset[cfr, t2, 8]
+ dispatch(4)
+
+.slow:
+ callSlowPath(slow_path)
+ dispatch(4)
+end
+
+_llint_op_stricteq:
+ traceExecution()
+ strictEq(macro (left, right, result) cieq left, right, result end, _llint_slow_path_stricteq)
+
+
+_llint_op_nstricteq:
+ traceExecution()
+ strictEq(macro (left, right, result) cineq left, right, result end, _llint_slow_path_nstricteq)
+
+
+_llint_op_less:
+ traceExecution()
+ callSlowPath(_llint_slow_path_less)
+ dispatch(4)
+
+
+_llint_op_lesseq:
+ traceExecution()
+ callSlowPath(_llint_slow_path_lesseq)
+ dispatch(4)
+
+
+_llint_op_greater:
+ traceExecution()
+ callSlowPath(_llint_slow_path_greater)
+ dispatch(4)
+
+
+_llint_op_greatereq:
+ traceExecution()
+ callSlowPath(_llint_slow_path_greatereq)
+ dispatch(4)
+
+
+_llint_op_pre_inc:
+ traceExecution()
+ loadi 4[PC], t0
+ bineq TagOffset[cfr, t0, 8], Int32Tag, .opPreIncSlow
+ loadi PayloadOffset[cfr, t0, 8], t1
+ baddio 1, t1, .opPreIncSlow
+ storei t1, PayloadOffset[cfr, t0, 8]
+ dispatch(2)
+
+.opPreIncSlow:
+ callSlowPath(_llint_slow_path_pre_inc)
+ dispatch(2)
+
+
+_llint_op_pre_dec:
+ traceExecution()
+ loadi 4[PC], t0
+ bineq TagOffset[cfr, t0, 8], Int32Tag, .opPreDecSlow
+ loadi PayloadOffset[cfr, t0, 8], t1
+ bsubio 1, t1, .opPreDecSlow
+ storei t1, PayloadOffset[cfr, t0, 8]
+ dispatch(2)
+
+.opPreDecSlow:
+ callSlowPath(_llint_slow_path_pre_dec)
+ dispatch(2)
+
+
+_llint_op_post_inc:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t1
+ bineq TagOffset[cfr, t0, 8], Int32Tag, .opPostIncSlow
+ bieq t0, t1, .opPostIncDone
+ loadi PayloadOffset[cfr, t0, 8], t2
+ move t2, t3
+ baddio 1, t3, .opPostIncSlow
+ storei Int32Tag, TagOffset[cfr, t1, 8]
+ storei t2, PayloadOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t0, 8]
+.opPostIncDone:
+ dispatch(3)
+
+.opPostIncSlow:
+ callSlowPath(_llint_slow_path_post_inc)
+ dispatch(3)
+
+
+_llint_op_post_dec:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t1
+ bineq TagOffset[cfr, t0, 8], Int32Tag, .opPostDecSlow
+ bieq t0, t1, .opPostDecDone
+ loadi PayloadOffset[cfr, t0, 8], t2
+ move t2, t3
+ bsubio 1, t3, .opPostDecSlow
+ storei Int32Tag, TagOffset[cfr, t1, 8]
+ storei t2, PayloadOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t0, 8]
+.opPostDecDone:
+ dispatch(3)
+
+.opPostDecSlow:
+ callSlowPath(_llint_slow_path_post_dec)
+ dispatch(3)
+
+
+_llint_op_to_jsnumber:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t1
+ loadConstantOrVariable(t0, t2, t3)
+ bieq t2, Int32Tag, .opToJsnumberIsInt
+ biaeq t2, EmptyValueTag, .opToJsnumberSlow
+.opToJsnumberIsInt:
+ storei t2, TagOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t1, 8]
+ dispatch(3)
+
+.opToJsnumberSlow:
+ callSlowPath(_llint_slow_path_to_jsnumber)
+ dispatch(3)
+
+
+_llint_op_negate:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t3
+ loadConstantOrVariable(t0, t1, t2)
+ bineq t1, Int32Tag, .opNegateSrcNotInt
+    btiz t2, 0x7fffffff, .opNegateSlow # 0 and INT_MIN need the slow path (-0 and overflow).
+ negi t2
+ storei Int32Tag, TagOffset[cfr, t3, 8]
+ storei t2, PayloadOffset[cfr, t3, 8]
+ dispatch(3)
+.opNegateSrcNotInt:
+ bia t1, LowestTag, .opNegateSlow
+    xori 0x80000000, t1 # Negate the double by flipping the sign bit in its high word.
+ storei t1, TagOffset[cfr, t3, 8]
+ storei t2, PayloadOffset[cfr, t3, 8]
+ dispatch(3)
+
+.opNegateSlow:
+ callSlowPath(_llint_slow_path_negate)
+ dispatch(3)
+
+
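+# Shared fast path for binary arithmetic. integerOperationAndStore receives
+# the Int32Tag, the two int32 payloads, a slow-path label, and the destination
+# index, and is responsible for storing the result itself. doubleOperation
+# receives its operands in ft1 and ft0 and must leave the result in ft0, which
+# is then stored to the destination as a double. Non-numbers go to slow_path.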
+macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slow_path)
+ loadi 12[PC], t2
+ loadi 8[PC], t0
+ loadConstantOrVariable(t2, t3, t1)
+ loadConstantOrVariable2Reg(t0, t2, t0)
+ bineq t2, Int32Tag, .op1NotInt
+ bineq t3, Int32Tag, .op2NotInt
+ loadi 4[PC], t2
+ integerOperationAndStore(t3, t1, t0, .slow, t2)
+ dispatch(5)
+
+.op1NotInt:
+ # First operand is definitely not an int, the second operand could be anything.
+ bia t2, LowestTag, .slow
+ bib t3, LowestTag, .op1NotIntOp2Double
+ bineq t3, Int32Tag, .slow
+ ci2d t1, ft1
+ jmp .op1NotIntReady
+.op1NotIntOp2Double:
+ fii2d t1, t3, ft1
+.op1NotIntReady:
+ loadi 4[PC], t1
+ fii2d t0, t2, ft0
+ doubleOperation(ft1, ft0)
+ stored ft0, [cfr, t1, 8]
+ dispatch(5)
+
+.op2NotInt:
+ # First operand is definitely an int, the second operand is definitely not.
+ loadi 4[PC], t2
+ bia t3, LowestTag, .slow
+ ci2d t0, ft0
+ fii2d t1, t3, ft1
+ doubleOperation(ft1, ft0)
+ stored ft0, [cfr, t2, 8]
+ dispatch(5)
+
+.slow:
+ callSlowPath(slow_path)
+ dispatch(5)
+end
+
+macro binaryOp(integerOperation, doubleOperation, slow_path)
+ binaryOpCustomStore(
+ macro (int32Tag, left, right, slow, index)
+ integerOperation(left, right, slow)
+ storei int32Tag, TagOffset[cfr, index, 8]
+ storei right, PayloadOffset[cfr, index, 8]
+ end,
+ doubleOperation, slow_path)
+end
+
+_llint_op_add:
+ traceExecution()
+ binaryOp(
+ macro (left, right, slow) baddio left, right, slow end,
+ macro (left, right) addd left, right end,
+ _llint_slow_path_add)
+
+
+_llint_op_mul:
+ traceExecution()
+ binaryOpCustomStore(
+ macro (int32Tag, left, right, slow, index)
+        const scratch = int32Tag # We can safely reuse the int32Tag register as scratch, since its value is a known constant.
+ move right, scratch
+ bmulio left, scratch, slow
+ btinz scratch, .done
+ bilt left, 0, slow
+ bilt right, 0, slow
+ .done:
+ storei Int32Tag, TagOffset[cfr, index, 8]
+ storei scratch, PayloadOffset[cfr, index, 8]
+ end,
+ macro (left, right) muld left, right end,
+ _llint_slow_path_mul)
+
+
+_llint_op_sub:
+ traceExecution()
+ binaryOp(
+ macro (left, right, slow) bsubio left, right, slow end,
+ macro (left, right) subd left, right end,
+ _llint_slow_path_sub)
+
+
+_llint_op_div:
+ traceExecution()
+ binaryOpCustomStore(
+ macro (int32Tag, left, right, slow, index)
+ ci2d left, ft0
+ ci2d right, ft1
+ divd ft0, ft1
+ bcd2i ft1, right, .notInt
+ storei int32Tag, TagOffset[cfr, index, 8]
+ storei right, PayloadOffset[cfr, index, 8]
+ jmp .done
+ .notInt:
+ stored ft1, [cfr, index, 8]
+ .done:
+ end,
+ macro (left, right) divd left, right end,
+ _llint_slow_path_div)
+
+
+_llint_op_mod:
+ traceExecution()
+ callSlowPath(_llint_slow_path_mod)
+ dispatch(4)
+
+
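+# Shared fast path for the bitwise and shift ops. Both operands must be
+# int32s; the operation mutates its second argument in place, and the result
+# is stored back as an int32. op_urshift additionally bails out when the
+# result does not fit in a signed int32.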
+macro bitOp(operation, slow_path, advance)
+ loadi 12[PC], t2
+ loadi 8[PC], t0
+ loadConstantOrVariable(t2, t3, t1)
+ loadConstantOrVariable2Reg(t0, t2, t0)
+ bineq t3, Int32Tag, .slow
+ bineq t2, Int32Tag, .slow
+ loadi 4[PC], t2
+ operation(t1, t0, .slow)
+ storei t3, TagOffset[cfr, t2, 8]
+ storei t0, PayloadOffset[cfr, t2, 8]
+ dispatch(advance)
+
+.slow:
+ callSlowPath(slow_path)
+ dispatch(advance)
+end
+
+_llint_op_lshift:
+ traceExecution()
+ bitOp(
+ macro (left, right, slow) lshifti left, right end,
+ _llint_slow_path_lshift,
+ 4)
+
+
+_llint_op_rshift:
+ traceExecution()
+ bitOp(
+ macro (left, right, slow) rshifti left, right end,
+ _llint_slow_path_rshift,
+ 4)
+
+
+_llint_op_urshift:
+ traceExecution()
+ bitOp(
+ macro (left, right, slow)
+ urshifti left, right
+ bilt right, 0, slow
+ end,
+ _llint_slow_path_urshift,
+ 4)
+
+
+_llint_op_bitand:
+ traceExecution()
+ bitOp(
+ macro (left, right, slow) andi left, right end,
+ _llint_slow_path_bitand,
+ 5)
+
+
+_llint_op_bitxor:
+ traceExecution()
+ bitOp(
+ macro (left, right, slow) xori left, right end,
+ _llint_slow_path_bitxor,
+ 5)
+
+
+_llint_op_bitor:
+ traceExecution()
+ bitOp(
+ macro (left, right, slow) ori left, right end,
+ _llint_slow_path_bitor,
+ 5)
+
+
+_llint_op_bitnot:
+ traceExecution()
+ loadi 8[PC], t1
+ loadi 4[PC], t0
+ loadConstantOrVariable(t1, t2, t3)
+ bineq t2, Int32Tag, .opBitnotSlow
+ noti t3
+ storei t2, TagOffset[cfr, t0, 8]
+ storei t3, PayloadOffset[cfr, t0, 8]
+ dispatch(3)
+
+.opBitnotSlow:
+ callSlowPath(_llint_slow_path_bitnot)
+ dispatch(3)
+
+
+_llint_op_check_has_instance:
+ traceExecution()
+ loadi 4[PC], t1
+ loadConstantOrVariablePayload(t1, CellTag, t0, .opCheckHasInstanceSlow)
+ loadp JSCell::m_structure[t0], t0
+ btbz Structure::m_typeInfo + TypeInfo::m_flags[t0], ImplementsHasInstance, .opCheckHasInstanceSlow
+ dispatch(2)
+
+.opCheckHasInstanceSlow:
+ callSlowPath(_llint_slow_path_check_has_instance)
+ dispatch(2)
+
+
+_llint_op_instanceof:
+ traceExecution()
+ # Check that baseVal implements the default HasInstance behavior.
+ # FIXME: This should be deprecated.
+ loadi 12[PC], t1
+ loadConstantOrVariablePayloadUnchecked(t1, t0)
+ loadp JSCell::m_structure[t0], t0
+ btbz Structure::m_typeInfo + TypeInfo::m_flags[t0], ImplementsDefaultHasInstance, .opInstanceofSlow
+
+ # Actually do the work.
+ loadi 16[PC], t0
+ loadi 4[PC], t3
+ loadConstantOrVariablePayload(t0, CellTag, t1, .opInstanceofSlow)
+ loadp JSCell::m_structure[t1], t2
+ bbb Structure::m_typeInfo + TypeInfo::m_type[t2], ObjectType, .opInstanceofSlow
+ loadi 8[PC], t0
+ loadConstantOrVariablePayload(t0, CellTag, t2, .opInstanceofSlow)
+
+ # Register state: t1 = prototype, t2 = value
+ move 1, t0
+.opInstanceofLoop:
+ loadp JSCell::m_structure[t2], t2
+ loadi Structure::m_prototype + PayloadOffset[t2], t2
+ bpeq t2, t1, .opInstanceofDone
+ btinz t2, .opInstanceofLoop
+
+ move 0, t0
+.opInstanceofDone:
+ storei BooleanTag, TagOffset[cfr, t3, 8]
+ storei t0, PayloadOffset[cfr, t3, 8]
+ dispatch(5)
+
+.opInstanceofSlow:
+ callSlowPath(_llint_slow_path_instanceof)
+ dispatch(5)
+
+
+_llint_op_typeof:
+ traceExecution()
+ callSlowPath(_llint_slow_path_typeof)
+ dispatch(3)
+
+
+_llint_op_is_undefined:
+ traceExecution()
+ callSlowPath(_llint_slow_path_is_undefined)
+ dispatch(3)
+
+
+_llint_op_is_boolean:
+ traceExecution()
+ callSlowPath(_llint_slow_path_is_boolean)
+ dispatch(3)
+
+
+_llint_op_is_number:
+ traceExecution()
+ callSlowPath(_llint_slow_path_is_number)
+ dispatch(3)
+
+
+_llint_op_is_string:
+ traceExecution()
+ callSlowPath(_llint_slow_path_is_string)
+ dispatch(3)
+
+
+_llint_op_is_object:
+ traceExecution()
+ callSlowPath(_llint_slow_path_is_object)
+ dispatch(3)
+
+
+_llint_op_is_function:
+ traceExecution()
+ callSlowPath(_llint_slow_path_is_function)
+ dispatch(3)
+
+
+_llint_op_in:
+ traceExecution()
+ callSlowPath(_llint_slow_path_in)
+ dispatch(4)
+
+
+_llint_op_resolve:
+ traceExecution()
+ callSlowPath(_llint_slow_path_resolve)
+ dispatch(4)
+
+
+_llint_op_resolve_skip:
+ traceExecution()
+ callSlowPath(_llint_slow_path_resolve_skip)
+ dispatch(5)
+
+
+macro resolveGlobal(size, slow)
+ # Operands are as follows:
+ # 4[PC] Destination for the load.
+ # 8[PC] Property identifier index in the code block.
+ # 12[PC] Structure pointer, initialized to 0 by bytecode generator.
+ # 16[PC] Offset in global object, initialized to 0 by bytecode generator.
+ loadp CodeBlock[cfr], t0
+ loadp CodeBlock::m_globalObject[t0], t0
+ loadp JSCell::m_structure[t0], t1
+ bpneq t1, 12[PC], slow
+ loadi 16[PC], t1
+ loadp JSObject::m_propertyStorage[t0], t0
+ loadi TagOffset[t0, t1, 8], t2
+ loadi PayloadOffset[t0, t1, 8], t3
+ loadi 4[PC], t0
+ storei t2, TagOffset[cfr, t0, 8]
+ storei t3, PayloadOffset[cfr, t0, 8]
+ loadi (size - 1) * 4[PC], t0
+ valueProfile(t2, t3, t0)
+end
+
+_llint_op_resolve_global:
+ traceExecution()
+ resolveGlobal(6, .opResolveGlobalSlow)
+ dispatch(6)
+
+.opResolveGlobalSlow:
+ callSlowPath(_llint_slow_path_resolve_global)
+ dispatch(6)
+
+
+# Gives you the scope in t0, while allowing you to optionally perform additional checks on the
+# scopes as they are traversed. scopeCheck() is called with two arguments: the register
+# holding the scope, and a register that can be used for scratch. Note that this does not
+# use t3, so you can hold stuff in t3 if need be.
+macro getScope(deBruijnIndexOperand, scopeCheck)
+ loadp ScopeChain + PayloadOffset[cfr], t0
+    loadi deBruijnIndexOperand, t2
+
+ btiz t2, .done
+
+ loadp CodeBlock[cfr], t1
+ bineq CodeBlock::m_codeType[t1], FunctionCode, .loop
+ btbz CodeBlock::m_needsFullScopeChain[t1], .loop
+
+ loadi CodeBlock::m_activationRegister[t1], t1
+
+ # Need to conditionally skip over one scope.
+ bieq TagOffset[cfr, t1, 8], EmptyValueTag, .noActivation
+ scopeCheck(t0, t1)
+ loadp ScopeChainNode::next[t0], t0
+.noActivation:
+ subi 1, t2
+
+ btiz t2, .done
+.loop:
+ scopeCheck(t0, t1)
+ loadp ScopeChainNode::next[t0], t0
+ subi 1, t2
+ btinz t2, .loop
+
+.done:
+end
+
+_llint_op_resolve_global_dynamic:
+ traceExecution()
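+    # Like op_resolve_global, but first walks the scope chain, checking that
+    # every skipped scope is an activation (by comparing against the cached
+    # activation structure). A dynamic scope in between could shadow the
+    # global property, so any structure mismatch takes the super-slow path.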
+ loadp JITStackFrame::globalData[sp], t3
+ loadp JSGlobalData::activationStructure[t3], t3
+ getScope(
+ 20[PC],
+ macro (scope, scratch)
+ loadp ScopeChainNode::object[scope], scratch
+ bpneq JSCell::m_structure[scratch], t3, .opResolveGlobalDynamicSuperSlow
+ end)
+ resolveGlobal(7, .opResolveGlobalDynamicSlow)
+ dispatch(7)
+
+.opResolveGlobalDynamicSuperSlow:
+ callSlowPath(_llint_slow_path_resolve_for_resolve_global_dynamic)
+ dispatch(7)
+
+.opResolveGlobalDynamicSlow:
+ callSlowPath(_llint_slow_path_resolve_global_dynamic)
+ dispatch(7)
+
+
+_llint_op_get_scoped_var:
+ traceExecution()
+ # Operands are as follows:
+ # 4[PC] Destination for the load.
+ # 8[PC] Index of register in the scope.
+    # 12[PC] De Bruijn index.
+ getScope(12[PC], macro (scope, scratch) end)
+ loadi 4[PC], t1
+ loadi 8[PC], t2
+ loadp ScopeChainNode::object[t0], t0
+ loadp JSVariableObject::m_registers[t0], t0
+ loadi TagOffset[t0, t2, 8], t3
+ loadi PayloadOffset[t0, t2, 8], t0
+ storei t3, TagOffset[cfr, t1, 8]
+ storei t0, PayloadOffset[cfr, t1, 8]
+ loadi 16[PC], t1
+ valueProfile(t3, t0, t1)
+ dispatch(5)
+
+
+_llint_op_put_scoped_var:
+ traceExecution()
+ getScope(8[PC], macro (scope, scratch) end)
+ loadi 12[PC], t1
+ loadConstantOrVariable(t1, t3, t2)
+ loadi 4[PC], t1
+ writeBarrier(t3, t2)
+ loadp ScopeChainNode::object[t0], t0
+ loadp JSVariableObject::m_registers[t0], t0
+ storei t3, TagOffset[t0, t1, 8]
+ storei t2, PayloadOffset[t0, t1, 8]
+ dispatch(4)
+
+
+_llint_op_get_global_var:
+ traceExecution()
+ loadi 8[PC], t1
+ loadi 4[PC], t3
+ loadp CodeBlock[cfr], t0
+ loadp CodeBlock::m_globalObject[t0], t0
+ loadp JSGlobalObject::m_registers[t0], t0
+ loadi TagOffset[t0, t1, 8], t2
+ loadi PayloadOffset[t0, t1, 8], t1
+ storei t2, TagOffset[cfr, t3, 8]
+ storei t1, PayloadOffset[cfr, t3, 8]
+ loadi 12[PC], t3
+ valueProfile(t2, t1, t3)
+ dispatch(4)
+
+
+_llint_op_put_global_var:
+ traceExecution()
+ loadi 8[PC], t1
+ loadp CodeBlock[cfr], t0
+ loadp CodeBlock::m_globalObject[t0], t0
+ loadp JSGlobalObject::m_registers[t0], t0
+ loadConstantOrVariable(t1, t2, t3)
+ loadi 4[PC], t1
+ writeBarrier(t2, t3)
+ storei t2, TagOffset[t0, t1, 8]
+ storei t3, PayloadOffset[t0, t1, 8]
+ dispatch(3)
+
+
+_llint_op_resolve_base:
+ traceExecution()
+ callSlowPath(_llint_slow_path_resolve_base)
+ dispatch(5)
+
+
+_llint_op_ensure_property_exists:
+ traceExecution()
+ callSlowPath(_llint_slow_path_ensure_property_exists)
+ dispatch(3)
+
+
+_llint_op_resolve_with_base:
+ traceExecution()
+ callSlowPath(_llint_slow_path_resolve_with_base)
+ dispatch(5)
+
+
+_llint_op_resolve_with_this:
+ traceExecution()
+ callSlowPath(_llint_slow_path_resolve_with_this)
+ dispatch(5)
+
+
+_llint_op_get_by_id:
+ traceExecution()
+    # We only do monomorphic get_by_id caching for now, and we do not modify the
+    # opcode. We do, however, allow the cache to change any time it fails, since
+    # ping-ponging is free. At best we get lucky and the get_by_id will continue
+    # to take the fast path on the new cache. At worst we take the slow path,
+    # which is what we would have been doing anyway.
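+    # Operands, as used by the fast path below:
+    # 4[PC] Destination for the load.
+    # 8[PC] Base operand; must be a cell on the fast path.
+    # 16[PC] Cached Structure, set up by the slow path.
+    # 20[PC] Cached byte offset into the property storage.
+    # 32[PC] Value profile, filled in via valueProfile().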
+ loadi 8[PC], t0
+ loadi 16[PC], t1
+ loadConstantOrVariablePayload(t0, CellTag, t3, .opGetByIdSlow)
+ loadi 20[PC], t2
+ loadp JSObject::m_propertyStorage[t3], t0
+ bpneq JSCell::m_structure[t3], t1, .opGetByIdSlow
+ loadi 4[PC], t1
+ loadi TagOffset[t0, t2], t3
+ loadi PayloadOffset[t0, t2], t2
+ storei t3, TagOffset[cfr, t1, 8]
+ storei t2, PayloadOffset[cfr, t1, 8]
+ loadi 32[PC], t1
+ valueProfile(t3, t2, t1)
+ dispatch(9)
+
+.opGetByIdSlow:
+ callSlowPath(_llint_slow_path_get_by_id)
+ dispatch(9)
+
+
+_llint_op_get_arguments_length:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t1
+ bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentsLengthSlow
+ loadi ArgumentCount + PayloadOffset[cfr], t2
+ subi 1, t2
+ storei Int32Tag, TagOffset[cfr, t1, 8]
+ storei t2, PayloadOffset[cfr, t1, 8]
+ dispatch(4)
+
+.opGetArgumentsLengthSlow:
+ callSlowPath(_llint_slow_path_get_arguments_length)
+ dispatch(4)
+
+
+_llint_op_put_by_id:
+ traceExecution()
+ loadi 4[PC], t3
+ loadi 16[PC], t1
+ loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
+ loadi 12[PC], t2
+ loadp JSObject::m_propertyStorage[t0], t3
+ bpneq JSCell::m_structure[t0], t1, .opPutByIdSlow
+ loadi 20[PC], t1
+ loadConstantOrVariable2Reg(t2, t0, t2)
+ writeBarrier(t0, t2)
+ storei t0, TagOffset[t3, t1]
+ storei t2, PayloadOffset[t3, t1]
+ dispatch(9)
+
+.opPutByIdSlow:
+ callSlowPath(_llint_slow_path_put_by_id)
+ dispatch(9)
+
+
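+# put_by_id that transitions the base to a new Structure. The fast path checks
+# the cached old Structure, runs additionalChecks (used by the normal variant
+# to revalidate the cached prototype chain), stores the value, and only then
+# swaps in the new Structure from 24[PC].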
+macro putByIdTransition(additionalChecks)
+ traceExecution()
+ loadi 4[PC], t3
+ loadi 16[PC], t1
+ loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
+ loadi 12[PC], t2
+ bpneq JSCell::m_structure[t0], t1, .opPutByIdSlow
+ additionalChecks(t1, t3, .opPutByIdSlow)
+ loadi 20[PC], t1
+ loadp JSObject::m_propertyStorage[t0], t3
+ addp t1, t3
+ loadConstantOrVariable2Reg(t2, t1, t2)
+ writeBarrier(t1, t2)
+ storei t1, TagOffset[t3]
+ loadi 24[PC], t1
+ storei t2, PayloadOffset[t3]
+ storep t1, JSCell::m_structure[t0]
+ dispatch(9)
+end
+
+_llint_op_put_by_id_transition_direct:
+ putByIdTransition(macro (oldStructure, scratch, slow) end)
+
+
+_llint_op_put_by_id_transition_normal:
+ putByIdTransition(
+ macro (oldStructure, scratch, slow)
+ const protoCell = oldStructure # Reusing the oldStructure register for the proto
+
+ loadp 28[PC], scratch
+ assert(macro (ok) btpnz scratch, ok end)
+ loadp StructureChain::m_vector[scratch], scratch
+ assert(macro (ok) btpnz scratch, ok end)
+ bieq Structure::m_prototype + TagOffset[oldStructure], NullTag, .done
+ .loop:
+ loadi Structure::m_prototype + PayloadOffset[oldStructure], protoCell
+ loadp JSCell::m_structure[protoCell], oldStructure
+ bpneq oldStructure, [scratch], slow
+ addp 4, scratch
+ bineq Structure::m_prototype + TagOffset[oldStructure], NullTag, .loop
+ .done:
+ end)
+
+
+_llint_op_del_by_id:
+ traceExecution()
+ callSlowPath(_llint_slow_path_del_by_id)
+ dispatch(4)
+
+
+_llint_op_get_by_val:
+ traceExecution()
+ loadp CodeBlock[cfr], t1
+ loadi 8[PC], t2
+ loadi 12[PC], t3
+ loadp CodeBlock::m_globalData[t1], t1
+ loadConstantOrVariablePayload(t2, CellTag, t0, .opGetByValSlow)
+ loadp JSGlobalData::jsArrayClassInfo[t1], t2
+ loadConstantOrVariablePayload(t3, Int32Tag, t1, .opGetByValSlow)
+ bpneq [t0], t2, .opGetByValSlow
+ loadp JSArray::m_storage[t0], t3
+ biaeq t1, JSArray::m_vectorLength[t0], .opGetByValSlow
+ loadi 4[PC], t0
+ loadi ArrayStorage::m_vector + TagOffset[t3, t1, 8], t2
+ loadi ArrayStorage::m_vector + PayloadOffset[t3, t1, 8], t1
+ bieq t2, EmptyValueTag, .opGetByValSlow
+ storei t2, TagOffset[cfr, t0, 8]
+ storei t1, PayloadOffset[cfr, t0, 8]
+ loadi 16[PC], t0
+ valueProfile(t2, t1, t0)
+ dispatch(5)
+
+.opGetByValSlow:
+ callSlowPath(_llint_slow_path_get_by_val)
+ dispatch(5)
+
+
+_llint_op_get_argument_by_val:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 12[PC], t1
+ bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentByValSlow
+ loadConstantOrVariablePayload(t1, Int32Tag, t2, .opGetArgumentByValSlow)
+ addi 1, t2
+ loadi ArgumentCount + PayloadOffset[cfr], t1
+ biaeq t2, t1, .opGetArgumentByValSlow
+    negi t2 # Arguments are at negative indices from the this slot: argument i lives at ThisArgumentOffset - (i + 1) * 8.
+ loadi 4[PC], t3
+ loadi ThisArgumentOffset + TagOffset[cfr, t2, 8], t0
+ loadi ThisArgumentOffset + PayloadOffset[cfr, t2, 8], t1
+ storei t0, TagOffset[cfr, t3, 8]
+ storei t1, PayloadOffset[cfr, t3, 8]
+ dispatch(5)
+
+.opGetArgumentByValSlow:
+ callSlowPath(_llint_slow_path_get_argument_by_val)
+ dispatch(5)
+
+
+_llint_op_get_by_pname:
+ traceExecution()
+ loadi 12[PC], t0
+ loadConstantOrVariablePayload(t0, CellTag, t1, .opGetByPnameSlow)
+ loadi 16[PC], t0
+ bpneq t1, PayloadOffset[cfr, t0, 8], .opGetByPnameSlow
+ loadi 8[PC], t0
+ loadConstantOrVariablePayload(t0, CellTag, t2, .opGetByPnameSlow)
+ loadi 20[PC], t0
+ loadi PayloadOffset[cfr, t0, 8], t3
+ loadp JSCell::m_structure[t2], t0
+ bpneq t0, JSPropertyNameIterator::m_cachedStructure[t3], .opGetByPnameSlow
+ loadi 24[PC], t0
+ loadi [cfr, t0, 8], t0
+ subi 1, t0
+ biaeq t0, JSPropertyNameIterator::m_numCacheableSlots[t3], .opGetByPnameSlow
+ loadp JSObject::m_propertyStorage[t2], t2
+ loadi TagOffset[t2, t0, 8], t1
+ loadi PayloadOffset[t2, t0, 8], t3
+ loadi 4[PC], t0
+ storei t1, TagOffset[cfr, t0, 8]
+ storei t3, PayloadOffset[cfr, t0, 8]
+ dispatch(7)
+
+.opGetByPnameSlow:
+ callSlowPath(_llint_slow_path_get_by_pname)
+ dispatch(7)
+
+
+_llint_op_put_by_val:
+ traceExecution()
+ loadi 4[PC], t0
+ loadConstantOrVariablePayload(t0, CellTag, t1, .opPutByValSlow)
+ loadi 8[PC], t0
+ loadConstantOrVariablePayload(t0, Int32Tag, t2, .opPutByValSlow)
+ loadp CodeBlock[cfr], t0
+ loadp CodeBlock::m_globalData[t0], t0
+ loadp JSGlobalData::jsArrayClassInfo[t0], t0
+ bpneq [t1], t0, .opPutByValSlow
+ biaeq t2, JSArray::m_vectorLength[t1], .opPutByValSlow
+ loadp JSArray::m_storage[t1], t0
+ bieq ArrayStorage::m_vector + TagOffset[t0, t2, 8], EmptyValueTag, .opPutByValEmpty
+.opPutByValStoreResult:
+ loadi 12[PC], t3
+ loadConstantOrVariable2Reg(t3, t1, t3)
+ writeBarrier(t1, t3)
+ storei t1, ArrayStorage::m_vector + TagOffset[t0, t2, 8]
+ storei t3, ArrayStorage::m_vector + PayloadOffset[t0, t2, 8]
+ dispatch(4)
+
+.opPutByValEmpty:
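+    # We are writing into a hole: bump the count of values in the vector, and
+    # grow the array's public length if we stored past it.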
+ addi 1, ArrayStorage::m_numValuesInVector[t0]
+ bib t2, ArrayStorage::m_length[t0], .opPutByValStoreResult
+ addi 1, t2, t1
+ storei t1, ArrayStorage::m_length[t0]
+ jmp .opPutByValStoreResult
+
+.opPutByValSlow:
+ callSlowPath(_llint_slow_path_put_by_val)
+ dispatch(4)
+
+
+_llint_op_del_by_val:
+ traceExecution()
+ callSlowPath(_llint_slow_path_del_by_val)
+ dispatch(4)
+
+
+_llint_op_put_by_index:
+ traceExecution()
+ callSlowPath(_llint_slow_path_put_by_index)
+ dispatch(4)
+
+
+_llint_op_put_getter_setter:
+ traceExecution()
+ callSlowPath(_llint_slow_path_put_getter_setter)
+ dispatch(5)
+
+
+_llint_op_loop:
+ nop
+_llint_op_jmp:
+ traceExecution()
+ dispatchBranch(4[PC])
+
+
+_llint_op_jmp_scopes:
+ traceExecution()
+ callSlowPath(_llint_slow_path_jmp_scopes)
+ dispatch(0)
+
+
+macro jumpTrueOrFalse(conditionOp, slow)
+ loadi 4[PC], t1
+ loadConstantOrVariablePayload(t1, BooleanTag, t0, .slow)
+ conditionOp(t0, .target)
+ dispatch(3)
+
+.target:
+ dispatchBranch(8[PC])
+
+.slow:
+ callSlowPath(slow)
+ dispatch(0)
+end
+
+_llint_op_loop_if_true:
+ nop
+_llint_op_jtrue:
+ traceExecution()
+ jumpTrueOrFalse(
+ macro (value, target) btinz value, target end,
+ _llint_slow_path_jtrue)
+
+
+_llint_op_loop_if_false:
+ nop
+_llint_op_jfalse:
+ traceExecution()
+ jumpTrueOrFalse(
+ macro (value, target) btiz value, target end,
+ _llint_slow_path_jfalse)
+
+
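+# Shared implementation of op_jeq_null and op_jneq_null. Cells are tested via
+# the MasqueradesAsUndefined type flag; immediates are tested by tag, where
+# ori 1 folds UndefinedTag into NullTag (the two tags differ only in the low
+# bit), so a single comparison against NullTag covers both null and undefined.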
+macro equalNull(cellHandler, immediateHandler)
+ loadi 4[PC], t0
+ loadi TagOffset[cfr, t0, 8], t1
+ loadi PayloadOffset[cfr, t0, 8], t0
+ bineq t1, CellTag, .immediate
+ loadp JSCell::m_structure[t0], t2
+ cellHandler(Structure::m_typeInfo + TypeInfo::m_flags[t2], .target)
+ dispatch(3)
+
+.target:
+ dispatchBranch(8[PC])
+
+.immediate:
+ ori 1, t1
+ immediateHandler(t1, .target)
+ dispatch(3)
+end
+
+_llint_op_jeq_null:
+ traceExecution()
+ equalNull(
+ macro (value, target) btbnz value, MasqueradesAsUndefined, target end,
+ macro (value, target) bieq value, NullTag, target end)
+
+
+_llint_op_jneq_null:
+ traceExecution()
+ equalNull(
+ macro (value, target) btbz value, MasqueradesAsUndefined, target end,
+ macro (value, target) bineq value, NullTag, target end)
+
+
+_llint_op_jneq_ptr:
+ traceExecution()
+ loadi 4[PC], t0
+ loadi 8[PC], t1
+ bineq TagOffset[cfr, t0, 8], CellTag, .opJneqPtrBranch
+ bpeq PayloadOffset[cfr, t0, 8], t1, .opJneqPtrFallThrough
+.opJneqPtrBranch:
+ dispatchBranch(12[PC])
+.opJneqPtrFallThrough:
+ dispatch(4)
+
+
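+# Shared fast path for the fused compare-and-branch opcodes. int32 pairs use
+# integerCompare directly; int/double mixes are converted to double and handed
+# to doubleCompare. Non-numeric operands go to the slow path, which performs
+# the branch itself, hence the dispatch(0) after the call.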
+macro compare(integerCompare, doubleCompare, slow_path)
+ loadi 4[PC], t2
+ loadi 8[PC], t3
+ loadConstantOrVariable(t2, t0, t1)
+ loadConstantOrVariable2Reg(t3, t2, t3)
+ bineq t0, Int32Tag, .op1NotInt
+ bineq t2, Int32Tag, .op2NotInt
+ integerCompare(t1, t3, .jumpTarget)
+ dispatch(4)
+
+.op1NotInt:
+ bia t0, LowestTag, .slow
+ bib t2, LowestTag, .op1NotIntOp2Double
+ bineq t2, Int32Tag, .slow
+ ci2d t3, ft1
+ jmp .op1NotIntReady
+.op1NotIntOp2Double:
+ fii2d t3, t2, ft1
+.op1NotIntReady:
+ fii2d t1, t0, ft0
+ doubleCompare(ft0, ft1, .jumpTarget)
+ dispatch(4)
+
+.op2NotInt:
+ ci2d t1, ft0
+ bia t2, LowestTag, .slow
+ fii2d t3, t2, ft1
+ doubleCompare(ft0, ft1, .jumpTarget)
+ dispatch(4)
+
+.jumpTarget:
+ dispatchBranch(12[PC])
+
+.slow:
+ callSlowPath(slow_path)
+ dispatch(0)
+end
+
+_llint_op_loop_if_less:
+ nop
+_llint_op_jless:
+ traceExecution()
+ compare(
+ macro (left, right, target) bilt left, right, target end,
+ macro (left, right, target) bdlt left, right, target end,
+ _llint_slow_path_jless)
+
+
+_llint_op_jnless:
+ traceExecution()
+ compare(
+ macro (left, right, target) bigteq left, right, target end,
+ macro (left, right, target) bdgtequn left, right, target end,
+ _llint_slow_path_jnless)
+
+
+_llint_op_loop_if_greater:
+ nop
+_llint_op_jgreater:
+ traceExecution()
+ compare(
+ macro (left, right, target) bigt left, right, target end,
+ macro (left, right, target) bdgt left, right, target end,
+ _llint_slow_path_jgreater)
+
+
+_llint_op_jngreater:
+ traceExecution()
+ compare(
+ macro (left, right, target) bilteq left, right, target end,
+ macro (left, right, target) bdltequn left, right, target end,
+ _llint_slow_path_jngreater)
+
+
+_llint_op_loop_if_lesseq:
+ nop
+_llint_op_jlesseq:
+ traceExecution()
+ compare(
+ macro (left, right, target) bilteq left, right, target end,
+ macro (left, right, target) bdlteq left, right, target end,
+ _llint_slow_path_jlesseq)
+
+
+_llint_op_jnlesseq:
+ traceExecution()
+ compare(
+ macro (left, right, target) bigt left, right, target end,
+ macro (left, right, target) bdgtun left, right, target end,
+ _llint_slow_path_jnlesseq)
+
+
+_llint_op_loop_if_greatereq:
+ nop
+_llint_op_jgreatereq:
+ traceExecution()
+ compare(
+ macro (left, right, target) bigteq left, right, target end,
+ macro (left, right, target) bdgteq left, right, target end,
+ _llint_slow_path_jgreatereq)
+
+
+_llint_op_jngreatereq:
+ traceExecution()
+ compare(
+ macro (left, right, target) bilt left, right, target end,
+ macro (left, right, target) bdltun left, right, target end,
+ _llint_slow_path_jngreatereq)
+
+
+_llint_op_loop_hint:
+ traceExecution()
+ checkSwitchToJITForLoop()
+ dispatch(1)
+
+
+_llint_op_switch_imm:
+ traceExecution()
+ loadi 12[PC], t2
+ loadi 4[PC], t3
+ loadConstantOrVariable(t2, t1, t0)
+ loadp CodeBlock[cfr], t2
+ loadp CodeBlock::m_rareData[t2], t2
+ muli sizeof SimpleJumpTable, t3 # FIXME: would be nice to peephole this!
+ loadp CodeBlock::RareData::m_immediateSwitchJumpTables + VectorBufferOffset[t2], t2
+ addp t3, t2
+ bineq t1, Int32Tag, .opSwitchImmNotInt
+ subi SimpleJumpTable::min[t2], t0
+ biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
+ loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
+ loadi [t3, t0, 4], t1
+ btiz t1, .opSwitchImmFallThrough
+ dispatchBranchWithOffset(t1)
+
+.opSwitchImmNotInt:
+ bib t1, LowestTag, .opSwitchImmSlow # Go to slow path if it's a double.
+.opSwitchImmFallThrough:
+ dispatchBranch(8[PC])
+
+.opSwitchImmSlow:
+ callSlowPath(_llint_slow_path_switch_imm)
+ dispatch(0)
+
+
+_llint_op_switch_char:
+ traceExecution()
+ loadi 12[PC], t2
+ loadi 4[PC], t3
+ loadConstantOrVariable(t2, t1, t0)
+ loadp CodeBlock[cfr], t2
+ loadp CodeBlock::m_rareData[t2], t2
+ muli sizeof SimpleJumpTable, t3
+ loadp CodeBlock::RareData::m_characterSwitchJumpTables + VectorBufferOffset[t2], t2
+ addp t3, t2
+ bineq t1, CellTag, .opSwitchCharFallThrough
+ loadp JSCell::m_structure[t0], t1
+ bbneq Structure::m_typeInfo + TypeInfo::m_type[t1], StringType, .opSwitchCharFallThrough
+ loadp JSString::m_value[t0], t0
+ bineq StringImpl::m_length[t0], 1, .opSwitchCharFallThrough
+ loadp StringImpl::m_data8[t0], t1
+ btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
+ loadh [t1], t0
+ jmp .opSwitchCharReady
+.opSwitchChar8Bit:
+ loadb [t1], t0
+.opSwitchCharReady:
+ subi SimpleJumpTable::min[t2], t0
+ biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
+ loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
+ loadi [t2, t0, 4], t1
+    btiz t1, .opSwitchCharFallThrough
+ dispatchBranchWithOffset(t1)
+
+.opSwitchCharFallThrough:
+ dispatchBranch(8[PC])
+
+
+_llint_op_switch_string:
+ traceExecution()
+ callSlowPath(_llint_slow_path_switch_string)
+ dispatch(0)
+
+
+_llint_op_new_func:
+ traceExecution()
+ btiz 12[PC], .opNewFuncUnchecked
+ loadi 4[PC], t1
+ bineq TagOffset[cfr, t1, 8], EmptyValueTag, .opNewFuncDone
+.opNewFuncUnchecked:
+ callSlowPath(_llint_slow_path_new_func)
+.opNewFuncDone:
+ dispatch(4)
+
+
+_llint_op_new_func_exp:
+ traceExecution()
+ callSlowPath(_llint_slow_path_new_func_exp)
+ dispatch(3)
+
+
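+# Fast path for op_call and op_construct. If the callee matches the one cached
+# in the LLIntCallLinkInfo, we build the new call frame inline (callee, scope
+# chain, caller frame, argument count) and jump to the cached machine code
+# target; any mismatch, including an unlinked cache, takes the slow path.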
+macro doCall(slow_path)
+ loadi 4[PC], t0
+ loadi 16[PC], t1
+ loadp LLIntCallLinkInfo::callee[t1], t2
+ loadConstantOrVariablePayload(t0, CellTag, t3, .opCallSlow)
+ bineq t3, t2, .opCallSlow
+ loadi 12[PC], t3
+ addp 24, PC
+ lshifti 3, t3
+ addp cfr, t3 # t3 contains the new value of cfr
+ loadp JSFunction::m_scopeChain[t2], t0
+ storei t2, Callee + PayloadOffset[t3]
+ storei t0, ScopeChain + PayloadOffset[t3]
+ loadi 8 - 24[PC], t2
+ storei PC, ArgumentCount + TagOffset[cfr]
+ storep cfr, CallerFrame[t3]
+ storei t2, ArgumentCount + PayloadOffset[t3]
+ storei CellTag, Callee + TagOffset[t3]
+ storei CellTag, ScopeChain + TagOffset[t3]
+ move t3, cfr
+ call LLIntCallLinkInfo::machineCodeTarget[t1]
+ dispatchAfterCall()
+
+.opCallSlow:
+ slowPathForCall(6, slow_path)
+end
+
+_llint_op_call:
+ traceExecution()
+ doCall(_llint_slow_path_call)
+
+
+_llint_op_construct:
+ traceExecution()
+ doCall(_llint_slow_path_construct)
+
+
+_llint_op_call_varargs:
+ traceExecution()
+ slowPathForCall(6, _llint_slow_path_call_varargs)
+
+
+_llint_op_call_eval:
+ traceExecution()
+
+ # Eval is executed in one of two modes:
+ #
+ # 1) We find that we're really invoking eval() in which case the
+    #     execution is performed entirely inside the slow_path, and it
+ # returns the PC of a function that just returns the return value
+ # that the eval returned.
+ #
+ # 2) We find that we're invoking something called eval() that is not
+ # the real eval. Then the slow_path returns the PC of the thing to
+ # call, and we call it.
+ #
+    # This allows us to handle two cases, which would otherwise require up to
+    # four pieces of state that cannot be easily packed into two registers
+    # (C functions can easily return up to two registers' worth of data):
+ #
+ # - The call frame register. This may or may not have been modified
+ # by the slow_path, but the convention is that it returns it. It's not
+    #   totally clear if that's necessary, since the cfr is callee-saved.
+    #   But that's our style in this here interpreter, so we stick with it.
+ #
+ # - A bit to say if the slow_path successfully executed the eval and has
+ # the return value, or did not execute the eval but has a PC for us
+ # to call.
+ #
+ # - Either:
+ # - The JS return value (two registers), or
+ #
+ # - The PC to call.
+ #
+ # It turns out to be easier to just always have this return the cfr
+ # and a PC to call, and that PC may be a dummy thunk that just
+ # returns the JS value that the eval returned.
+
+ slowPathForCall(4, _llint_slow_path_call_eval)
+
+
+_llint_generic_return_point:
+ dispatchAfterCall()
+
+
+_llint_op_tear_off_activation:
+ traceExecution()
+ loadi 4[PC], t0
+ loadi 8[PC], t1
+ bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffActivationCreated
+ bieq TagOffset[cfr, t1, 8], EmptyValueTag, .opTearOffActivationNotCreated
+.opTearOffActivationCreated:
+ callSlowPath(_llint_slow_path_tear_off_activation)
+.opTearOffActivationNotCreated:
+ dispatch(3)
+
+
+_llint_op_tear_off_arguments:
+ traceExecution()
+ loadi 4[PC], t0
+ subi 1, t0 # Get the unmodifiedArgumentsRegister
+ bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffArgumentsNotCreated
+ callSlowPath(_llint_slow_path_tear_off_arguments)
+.opTearOffArgumentsNotCreated:
+ dispatch(2)
+
+
+macro doReturn()
+ loadp ReturnPC[cfr], t2
+ loadp CallerFrame[cfr], cfr
+ restoreReturnAddressBeforeReturn(t2)
+ ret
+end
+
+_llint_op_ret:
+ traceExecution()
+ checkSwitchToJITForEpilogue()
+ loadi 4[PC], t2
+ loadConstantOrVariable(t2, t1, t0)
+ doReturn()
+
+
+_llint_op_call_put_result:
+ loadi 4[PC], t2
+ loadi 8[PC], t3
+ storei t1, TagOffset[cfr, t2, 8]
+ storei t0, PayloadOffset[cfr, t2, 8]
+ valueProfile(t1, t0, t3)
+    traceExecution() # Must come after the uses above, since it clobbers t0 and t1.
+ dispatch(3)
+
+
+_llint_op_ret_object_or_this:
+ traceExecution()
+ checkSwitchToJITForEpilogue()
+ loadi 4[PC], t2
+ loadConstantOrVariable(t2, t1, t0)
+ bineq t1, CellTag, .opRetObjectOrThisNotObject
+ loadp JSCell::m_structure[t0], t2
+ bbb Structure::m_typeInfo + TypeInfo::m_type[t2], ObjectType, .opRetObjectOrThisNotObject
+ doReturn()
+
+.opRetObjectOrThisNotObject:
+ loadi 8[PC], t2
+ loadConstantOrVariable(t2, t1, t0)
+ doReturn()
+
+
+_llint_op_method_check:
+ traceExecution()
+ # We ignore method checks and use normal get_by_id optimizations.
+ dispatch(1)
+
+
+_llint_op_strcat:
+ traceExecution()
+ callSlowPath(_llint_slow_path_strcat)
+ dispatch(4)
+
+
+_llint_op_to_primitive:
+ traceExecution()
+ loadi 8[PC], t2
+ loadi 4[PC], t3
+ loadConstantOrVariable(t2, t1, t0)
+ bineq t1, CellTag, .opToPrimitiveIsImm
+ loadp JSCell::m_structure[t0], t2
+ bbneq Structure::m_typeInfo + TypeInfo::m_type[t2], StringType, .opToPrimitiveSlowCase
+.opToPrimitiveIsImm:
+ storei t1, TagOffset[cfr, t3, 8]
+ storei t0, PayloadOffset[cfr, t3, 8]
+ dispatch(3)
+
+.opToPrimitiveSlowCase:
+ callSlowPath(_llint_slow_path_to_primitive)
+ dispatch(3)
+
+
+_llint_op_get_pnames:
+ traceExecution()
+ callSlowPath(_llint_slow_path_get_pnames)
+    dispatch(0) # The slow_path either advances the PC or jumps us somewhere else.
+
+
+_llint_op_next_pname:
+ traceExecution()
+ loadi 12[PC], t1
+ loadi 16[PC], t2
+ loadi PayloadOffset[cfr, t1, 8], t0
+ bieq t0, PayloadOffset[cfr, t2, 8], .opNextPnameEnd
+ loadi 20[PC], t2
+ loadi PayloadOffset[cfr, t2, 8], t2
+ loadp JSPropertyNameIterator::m_jsStrings[t2], t3
+ loadi [t3, t0, 8], t3
+ addi 1, t0
+ storei t0, PayloadOffset[cfr, t1, 8]
+ loadi 4[PC], t1
+ storei CellTag, TagOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t1, 8]
+ loadi 8[PC], t3
+ loadi PayloadOffset[cfr, t3, 8], t3
+ loadp JSCell::m_structure[t3], t1
+ bpneq t1, JSPropertyNameIterator::m_cachedStructure[t2], .opNextPnameSlow
+ loadp JSPropertyNameIterator::m_cachedPrototypeChain[t2], t0
+ loadp StructureChain::m_vector[t0], t0
+ btpz [t0], .opNextPnameTarget
+.opNextPnameCheckPrototypeLoop:
+ bieq Structure::m_prototype + TagOffset[t1], NullTag, .opNextPnameSlow
+ loadp Structure::m_prototype + PayloadOffset[t1], t2
+ loadp JSCell::m_structure[t2], t1
+ bpneq t1, [t0], .opNextPnameSlow
+ addp 4, t0
+ btpnz [t0], .opNextPnameCheckPrototypeLoop
+.opNextPnameTarget:
+ dispatchBranch(24[PC])
+
+.opNextPnameEnd:
+ dispatch(7)
+
+.opNextPnameSlow:
+    callSlowPath(_llint_slow_path_next_pname) # This either keeps the PC where it was (causing us to loop) or sets it to the target.
+ dispatch(0)
+
+
+_llint_op_push_scope:
+ traceExecution()
+ callSlowPath(_llint_slow_path_push_scope)
+ dispatch(2)
+
+
+_llint_op_pop_scope:
+ traceExecution()
+ callSlowPath(_llint_slow_path_pop_scope)
+ dispatch(1)
+
+
+_llint_op_push_new_scope:
+ traceExecution()
+ callSlowPath(_llint_slow_path_push_new_scope)
+ dispatch(4)
+
+
+_llint_op_catch:
+ # This is where we end up from the JIT's throw trampoline (because the
+ # machine code return address will be set to _llint_op_catch), and from
+ # the interpreter's throw trampoline (see _llint_throw_trampoline).
+ # The JIT throwing protocol calls for the cfr to be in t0. The throwing
+ # code must have known that we were throwing to the interpreter, and have
+ # set JSGlobalData::targetInterpreterPCForThrow.
+ move t0, cfr
+ loadp JITStackFrame::globalData[sp], t3
+ loadi JSGlobalData::targetInterpreterPCForThrow[t3], PC
+ loadi JSGlobalData::exception + PayloadOffset[t3], t0
+ loadi JSGlobalData::exception + TagOffset[t3], t1
+ storei 0, JSGlobalData::exception + PayloadOffset[t3]
+ storei EmptyValueTag, JSGlobalData::exception + TagOffset[t3]
+ loadi 4[PC], t2
+ storei t0, PayloadOffset[cfr, t2, 8]
+ storei t1, TagOffset[cfr, t2, 8]
+    traceExecution() # Placed last so that it does not clobber t0, t1, t2, t3, which are live above.
+ dispatch(2)
+
+
+_llint_op_throw:
+ traceExecution()
+ callSlowPath(_llint_slow_path_throw)
+ dispatch(2)
+
+
+_llint_op_throw_reference_error:
+ traceExecution()
+ callSlowPath(_llint_slow_path_throw_reference_error)
+ dispatch(2)
+
+
+_llint_op_jsr:
+ traceExecution()
+ loadi 4[PC], t0
+ addi 3 * 4, PC, t1
+ storei t1, [cfr, t0, 8]
+ dispatchBranch(8[PC])
+
+
+_llint_op_sret:
+ traceExecution()
+ loadi 4[PC], t0
+ loadp [cfr, t0, 8], PC
+ dispatch(0)
+
+
+_llint_op_debug:
+ traceExecution()
+ callSlowPath(_llint_slow_path_debug)
+ dispatch(4)
+
+
+_llint_op_profile_will_call:
+ traceExecution()
+ loadp JITStackFrame::enabledProfilerReference[sp], t0
+ btpz [t0], .opProfileWillCallDone
+ callSlowPath(_llint_slow_path_profile_will_call)
+.opProfileWillCallDone:
+ dispatch(2)
+
+
+_llint_op_profile_did_call:
+ traceExecution()
+ loadp JITStackFrame::enabledProfilerReference[sp], t0
+    btpz [t0], .opProfileDidCallDone
+ callSlowPath(_llint_slow_path_profile_did_call)
+.opProfileDidCallDone:
+ dispatch(2)
+
+
+_llint_op_end:
+ traceExecution()
+ checkSwitchToJITForEpilogue()
+ loadi 4[PC], t0
+ loadi TagOffset[cfr, t0, 8], t1
+ loadi PayloadOffset[cfr, t0, 8], t0
+ doReturn()
+
+
+_llint_throw_from_slow_path_trampoline:
+    # We come here when throwing from the interpreter (i.e. from LLIntSlowPaths),
+    # since the throw target is not necessarily interpreted code. This
+    # essentially emulates the JIT's throwing protocol.
+ loadp JITStackFrame::globalData[sp], t1
+ loadp JSGlobalData::callFrameForThrow[t1], t0
+ jmp JSGlobalData::targetMachinePCForThrow[t1]
+
+
+_llint_throw_during_call_trampoline:
+ preserveReturnAddressAfterCall(t2)
+ loadp JITStackFrame::globalData[sp], t1
+ loadp JSGlobalData::callFrameForThrow[t1], t0
+ jmp JSGlobalData::targetMachinePCForThrow[t1]
+
+
+# Lastly, make sure that we can link even though we don't support all opcodes.
+# These opcodes should never arise when using LLInt or either JIT. We assert
+# as much.
+
+macro notSupported()
+ if ASSERT_ENABLED
+ crash()
+ else
+ # We should use whatever the smallest possible instruction is, just to
+ # ensure that there is a gap between instruction labels. If multiple
+        # smallest instructions exist, we should pick the one that is most
+        # likely to result in execution being halted. Currently that is the break
+ # instruction on all architectures we're interested in. (Break is int3
+ # on Intel, which is 1 byte, and bkpt on ARMv7, which is 2 bytes.)
+ break
+ end
+end
+
+_llint_op_get_array_length:
+ notSupported()
+
+_llint_op_get_by_id_chain:
+ notSupported()
+
+_llint_op_get_by_id_custom_chain:
+ notSupported()
+
+_llint_op_get_by_id_custom_proto:
+ notSupported()
+
+_llint_op_get_by_id_custom_self:
+ notSupported()
+
+_llint_op_get_by_id_generic:
+ notSupported()
+
+_llint_op_get_by_id_getter_chain:
+ notSupported()
+
+_llint_op_get_by_id_getter_proto:
+ notSupported()
+
+_llint_op_get_by_id_getter_self:
+ notSupported()
+
+_llint_op_get_by_id_proto:
+ notSupported()
+
+_llint_op_get_by_id_self:
+ notSupported()
+
+_llint_op_get_string_length:
+ notSupported()
+
+_llint_op_put_by_id_generic:
+ notSupported()
+
+_llint_op_put_by_id_replace:
+ notSupported()
+
+_llint_op_put_by_id_transition:
+ notSupported()
+
+
+# Indicate the end of LLInt.
+_llint_end:
+ crash()
+
diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp b/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp
new file mode 100644
index 000000000..b95a50082
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LowLevelInterpreter.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "LowLevelInterpreter.h"
+
+#if ENABLE(LLINT)
+
+#include "LLIntOfflineAsmConfig.h"
+#include <wtf/InlineASM.h>
+
+// This is a file generated by offlineasm, which contains all of the assembly code
+// for the interpreter, as compiled from LowLevelInterpreter.asm.
+#include "LLIntAssembly.h"
+
+#endif // ENABLE(LLINT)
diff --git a/Source/JavaScriptCore/llint/LowLevelInterpreter.h b/Source/JavaScriptCore/llint/LowLevelInterpreter.h
new file mode 100644
index 000000000..e5a54a45d
--- /dev/null
+++ b/Source/JavaScriptCore/llint/LowLevelInterpreter.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef LowLevelInterpreter_h
+#define LowLevelInterpreter_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(LLINT)
+
+#include "Opcode.h"
+
+#define LLINT_INSTRUCTION_DECL(opcode, length) extern "C" void llint_##opcode();
+ FOR_EACH_OPCODE_ID(LLINT_INSTRUCTION_DECL);
+#undef LLINT_INSTRUCTION_DECL
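+// For each opcode id this expands to a declaration such as
+//   extern "C" void llint_op_add();
+// whose definition is the corresponding _llint_op_add label emitted by
+// offlineasm from LowLevelInterpreter.asm.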
+
+extern "C" void llint_begin();
+extern "C" void llint_end();
+extern "C" void llint_program_prologue();
+extern "C" void llint_eval_prologue();
+extern "C" void llint_function_for_call_prologue();
+extern "C" void llint_function_for_construct_prologue();
+extern "C" void llint_function_for_call_arity_check();
+extern "C" void llint_function_for_construct_arity_check();
+extern "C" void llint_generic_return_point();
+extern "C" void llint_throw_from_slow_path_trampoline();
+extern "C" void llint_throw_during_call_trampoline();
+
+#endif // ENABLE(LLINT)
+
+#endif // LowLevelInterpreter_h