summaryrefslogtreecommitdiff
path: root/Source/JavaScriptCore/interpreter
diff options
context:
space:
mode:
Diffstat (limited to 'Source/JavaScriptCore/interpreter')
-rw-r--r--Source/JavaScriptCore/interpreter/CallFrame.h31
-rw-r--r--Source/JavaScriptCore/interpreter/CallFrameClosure.h1
-rw-r--r--Source/JavaScriptCore/interpreter/Interpreter.cpp534
-rw-r--r--Source/JavaScriptCore/interpreter/Interpreter.h41
-rw-r--r--Source/JavaScriptCore/interpreter/JSStack.cpp52
-rw-r--r--Source/JavaScriptCore/interpreter/JSStack.h60
-rw-r--r--Source/JavaScriptCore/interpreter/JSStackInlines.h232
7 files changed, 670 insertions, 281 deletions
diff --git a/Source/JavaScriptCore/interpreter/CallFrame.h b/Source/JavaScriptCore/interpreter/CallFrame.h
index 7aa49a9b0..2b0ea3aac 100644
--- a/Source/JavaScriptCore/interpreter/CallFrame.h
+++ b/Source/JavaScriptCore/interpreter/CallFrame.h
@@ -256,6 +256,7 @@ namespace JSC {
CodeBlock* someCodeBlockForPossiblyInlinedCode() { return codeBlock(); }
#endif
+ CallFrame* callerFrameNoFlags() { return callerFrame()->removeHostCallFrameFlag(); }
// Call this to get the true call frame (accounted for inlining and any
// other optimizations), when you have entered into VM code through one
@@ -281,6 +282,36 @@ namespace JSC {
ExecState();
~ExecState();
+ // The following are for internal use in debugging and verification
+ // code only and not meant as an API for general usage:
+
+ size_t argIndexForRegister(Register* reg)
+ {
+ // The register at 'offset' number of slots from the frame pointer
+ // i.e.
+ // reg = frame[offset];
+ // ==> reg = frame + offset;
+ // ==> offset = reg - frame;
+ int offset = reg - this->registers();
+
+ // The offset is defined (based on argumentOffset()) to be:
+ // offset = s_firstArgumentOffset - argIndex;
+ // Hence:
+ // argIndex = s_firstArgumentOffset - offset;
+ size_t argIndex = s_firstArgumentOffset - offset;
+ return argIndex;
+ }
+
+ JSValue getArgumentUnsafe(size_t argIndex)
+ {
+ // User beware! This method does not verify that there is a valid
+ // argument at the specified argIndex. This is used for debugging
+ // and verification code only. The caller is expected to know what
+ // he/she is doing when calling this method.
+ return this[argumentOffset(argIndex)].jsValue();
+ }
+
+ friend class JSStack;
friend class VMInspector;
};
diff --git a/Source/JavaScriptCore/interpreter/CallFrameClosure.h b/Source/JavaScriptCore/interpreter/CallFrameClosure.h
index 157d1b3b9..010c9655b 100644
--- a/Source/JavaScriptCore/interpreter/CallFrameClosure.h
+++ b/Source/JavaScriptCore/interpreter/CallFrameClosure.h
@@ -34,7 +34,6 @@ struct CallFrameClosure {
JSFunction* function;
FunctionExecutable* functionExecutable;
JSGlobalData* globalData;
- Register* oldEnd;
JSScope* scope;
int parameterCountIncludingThis;
int argumentCountIncludingThis;
diff --git a/Source/JavaScriptCore/interpreter/Interpreter.cpp b/Source/JavaScriptCore/interpreter/Interpreter.cpp
index 8c09019da..0d475b416 100644
--- a/Source/JavaScriptCore/interpreter/Interpreter.cpp
+++ b/Source/JavaScriptCore/interpreter/Interpreter.cpp
@@ -48,6 +48,7 @@
#include "JSNameScope.h"
#include "JSNotAnObject.h"
#include "JSPropertyNameIterator.h"
+#include "JSStackInlines.h"
#include "JSString.h"
#include "JSWithScope.h"
#include "LLIntCLoop.h"
@@ -67,6 +68,7 @@
#include <stdio.h>
#include <wtf/StackStats.h>
#include <wtf/Threading.h>
+#include <wtf/WTFThreadData.h>
#include <wtf/text/StringBuilder.h>
#if ENABLE(JIT)
@@ -79,41 +81,131 @@ using namespace std;
namespace JSC {
-static CallFrame* getCallerInfo(JSGlobalData*, CallFrame*, int& lineNumber, unsigned& bytecodeOffset);
-
-// Returns the depth of the scope chain within a given call frame.
-static int depth(CodeBlock* codeBlock, JSScope* sc)
+Interpreter::ErrorHandlingMode::ErrorHandlingMode(ExecState *exec)
+ : m_interpreter(*exec->interpreter())
{
- if (!codeBlock->needsFullScopeChain())
- return 0;
- return sc->localDepth();
+ if (!m_interpreter.m_errorHandlingModeReentry)
+ m_interpreter.stack().enableErrorStackReserve();
+ m_interpreter.m_errorHandlingModeReentry++;
}
-ALWAYS_INLINE CallFrame* Interpreter::slideRegisterWindowForCall(CodeBlock* newCodeBlock, JSStack* stack, CallFrame* callFrame, size_t registerOffset, int argumentCountIncludingThis)
+Interpreter::ErrorHandlingMode::~ErrorHandlingMode()
{
- // This ensures enough space for the worst case scenario of zero arguments passed by the caller.
- if (!stack->grow(callFrame->registers() + registerOffset + newCodeBlock->numParameters() + newCodeBlock->m_numCalleeRegisters))
- return 0;
+ m_interpreter.m_errorHandlingModeReentry--;
+ ASSERT(m_interpreter.m_errorHandlingModeReentry >= 0);
+ if (!m_interpreter.m_errorHandlingModeReentry)
+ m_interpreter.stack().disableErrorStackReserve();
+}
- if (argumentCountIncludingThis >= newCodeBlock->numParameters()) {
- Register* newCallFrame = callFrame->registers() + registerOffset;
- return CallFrame::create(newCallFrame);
- }
- // Too few arguments -- copy arguments, then fill in missing arguments with undefined.
- size_t delta = newCodeBlock->numParameters() - argumentCountIncludingThis;
- CallFrame* newCallFrame = CallFrame::create(callFrame->registers() + registerOffset + delta);
+// The Interpreter::StackPolicy class is used to compute a stack capacity
+// requirement to ensure that we have enough room on the native stack for:
+// 1. the max cumulative stack used by the interpreter and all code
+// paths beneath it, down to leaf functions.
+// 2. the max cumulative stack used by the interpreter before it reaches
+// the next checkpoint (execute...() function) in the interpreter.
+//
+// The interpreter can be run on different threads and hence, different
+// native stacks (with different sizes) before exiting out of the first
+// frame. Hence, the required capacity needs to be re-computed on every
+// entry into the interpreter.
+//
+// Currently the requiredStack is computed based on a policy. See comments
+// in StackPolicy::StackPolicy() for details.
+
+Interpreter::StackPolicy::StackPolicy(Interpreter& interpreter, const StackBounds& stack)
+ : m_interpreter(interpreter)
+{
+ int size = stack.size();
+
+ const int DEFAULT_REQUIRED_STACK = 1024 * 1024;
+ const int DEFAULT_MINIMUM_USEABLE_STACK = 128 * 1024;
+ const int DEFAULT_ERROR_MODE_REQUIRED_STACK = 32 * 1024;
+
+ // Here's the policy in a nutshell:
+ //
+ // 1. If we have a large stack, let JS use as much stack as possible
+ // but require that we have at least DEFAULT_REQUIRED_STACK capacity
+ // remaining on the stack:
+ //
+ // stack grows this way -->
+ // ---------------------------------------------------------
+ // | ... | <-- DEFAULT_REQUIRED_STACK --> | ...
+ // ---------------------------------------------------------
+ // ^ ^
+ // start current sp
+ //
+ // 2. In event that we're re-entering the interpreter to handle
+ // exceptions (in error mode), we'll be a little more generous and
+ // require less stack capacity for the interpreter to be re-entered.
+ //
+ // This is needed because we may have just detected an imminent stack
+ // overflow based on the normally computed required stack capacity.
+ // However, the normal required capacity far exceeds what is needed
+ // for exception handling work. Hence, in error mode, we only require
+ // DEFAULT_ERROR_MODE_REQUIRED_STACK capacity.
+ //
+ // stack grows this way -->
+ // -----------------------------------------------------------------
+ // | ... | <-- DEFAULT_ERROR_MODE_REQUIRED_STACK --> | ...
+ // -----------------------------------------------------------------
+ // ^ ^
+ // start current sp
+ //
+ // This smaller required capacity also means that we won't re-trigger
+ // a stack overflow for processing the exception caused by the original
+ // StackOverflowError.
+ //
+ // 3. If the stack is not large enough, give JS at least a minimum
+ // amount of useable stack:
+ //
+ // stack grows this way -->
+ // --------------------------------------------------------------------
+ // | <-- DEFAULT_MINIMUM_USEABLE_STACK --> | <-- requiredCapacity --> |
+ // --------------------------------------------------------------------
+ // ^ ^
+ // start current sp
+ //
+ // The minimum useable capacity is DEFAULT_MINIMUM_USEABLE_STACK.
+ // In this case, the requiredCapacity is whatever is left of the
+ // total stack capacity after we have given JS its minimum stack,
+ // i.e. requiredCapacity can even be 0 if there's not enough stack.
+
+
+ // Policy 1: Normal mode: required = DEFAULT_REQUIRED_STACK.
+ // Policy 2: Error mode: required = DEFAULT_ERROR_MODE_REQUIRED_STACK.
+ int requiredCapacity = !m_interpreter.m_errorHandlingModeReentry ?
+ DEFAULT_REQUIRED_STACK : DEFAULT_ERROR_MODE_REQUIRED_STACK;
+
+ int useableStack = size - requiredCapacity;
+
+ // Policy 3: Ensure the useable stack is not too small:
+ if (useableStack < DEFAULT_MINIMUM_USEABLE_STACK)
+ useableStack = DEFAULT_MINIMUM_USEABLE_STACK;
+
+ // Sanity check: Make sure we do not use more space than the stack's
+ // total capacity:
+ if (useableStack > size)
+ useableStack = size;
+
+ // Re-compute the requiredCapacity based on the adjusted useable stack
+ // size. This requiredCapacity is what the interpreter's stack checks
+ // will be based on:
+ requiredCapacity = size - useableStack;
+ ASSERT((requiredCapacity >= 0) && (requiredCapacity < size));
+
+ m_requiredCapacity = requiredCapacity;
+}
- Register* dst = &newCallFrame->uncheckedR(CallFrame::thisArgumentOffset());
- Register* end = dst - argumentCountIncludingThis;
- for ( ; dst != end; --dst)
- *dst = *(dst - delta);
- end -= delta;
- for ( ; dst != end; --dst)
- *dst = jsUndefined();
+static CallFrame* getCallerInfo(JSGlobalData*, CallFrame*, int& lineNumber, unsigned& bytecodeOffset);
- return newCallFrame;
+// Returns the depth of the scope chain within a given call frame.
+static int depth(CodeBlock* codeBlock, JSScope* sc)
+{
+ if (!codeBlock->needsFullScopeChain())
+ return 0;
+ return sc->localDepth();
}
JSValue eval(CallFrame* callFrame)
@@ -164,7 +256,7 @@ JSValue eval(CallFrame* callFrame)
JSValue thisValue = callerFrame->thisValue();
ASSERT(isValidThisObject(thisValue, callFrame));
Interpreter* interpreter = callFrame->globalData().interpreter;
- return interpreter->execute(eval, callFrame, thisValue, callerScopeChain, callFrame->registers() - interpreter->stack().begin() + 1 + JSStack::CallFrameHeaderSize);
+ return interpreter->execute(eval, callFrame, thisValue, callerScopeChain);
}
CallFrame* loadVarargs(CallFrame* callFrame, JSStack* stack, JSValue thisValue, JSValue arguments, int firstFreeRegister)
@@ -245,9 +337,10 @@ CallFrame* loadVarargs(CallFrame* callFrame, JSStack* stack, JSValue thisValue,
return newCallFrame;
}
-Interpreter::Interpreter()
+Interpreter::Interpreter(JSGlobalData& globalData)
: m_sampleEntryDepth(0)
- , m_reentryDepth(0)
+ , m_stack(globalData)
+ , m_errorHandlingModeReentry(0)
#if !ASSERT_DISABLED
, m_initialized(false)
#endif
@@ -681,26 +774,13 @@ NEVER_INLINE HandlerInfo* Interpreter::throwException(CallFrame*& callFrame, JSV
if (!unwindCallFrame(callFrame, exceptionValue, bytecodeOffset, codeBlock)) {
if (Profiler* profiler = callFrame->globalData().enabledProfiler())
profiler->exceptionUnwind(callFrame);
- callFrame->globalData().topCallFrame = callFrame;
return 0;
}
}
- callFrame->globalData().topCallFrame = callFrame;
if (Profiler* profiler = callFrame->globalData().enabledProfiler())
profiler->exceptionUnwind(callFrame);
- // Shrink the JS stack, in case stack overflow made it huge.
- Register* highWaterMark = 0;
- for (CallFrame* callerFrame = callFrame; callerFrame; callerFrame = callerFrame->callerFrame()->removeHostCallFrameFlag()) {
- CodeBlock* codeBlock = callerFrame->codeBlock();
- if (!codeBlock)
- continue;
- Register* callerHighWaterMark = callerFrame->registers() + codeBlock->m_numCalleeRegisters;
- highWaterMark = max(highWaterMark, callerHighWaterMark);
- }
- m_stack.shrink(highWaterMark);
-
// Unwind the scope chain within the exception handler's call frame.
JSScope* scope = callFrame->scope();
int scopeDelta = 0;
@@ -747,20 +827,24 @@ JSValue Interpreter::execute(ProgramExecutable* program, CallFrame* callFrame, J
SamplingScope samplingScope(this);
JSScope* scope = callFrame->scope();
+ JSGlobalData& globalData = *scope->globalData();
+
ASSERT(isValidThisObject(thisObj, callFrame));
- ASSERT(!scope->globalData()->exception);
- ASSERT(!callFrame->globalData().isCollectorBusy());
- if (callFrame->globalData().isCollectorBusy())
+ ASSERT(!globalData.exception);
+ ASSERT(!globalData.isCollectorBusy());
+ if (globalData.isCollectorBusy())
CRASH();
StackStats::CheckPoint stackCheckPoint;
- if (m_reentryDepth >= MaxSmallThreadReentryDepth && m_reentryDepth >= callFrame->globalData().maxReentryDepth)
+ const StackBounds& nativeStack = wtfThreadData().stack();
+ StackPolicy policy(*this, nativeStack);
+ if (!nativeStack.isSafeToRecurse(policy.requiredCapacity()))
return checkedReturn(throwStackOverflowError(callFrame));
// First check if the "program" is actually just a JSON object. If so,
// we'll handle the JSON object here. Else, we'll handle real JS code
// below at failedJSONP.
- DynamicGlobalObjectScope globalObjectScope(*scope->globalData(), scope->globalObject());
+ DynamicGlobalObjectScope globalObjectScope(globalData, scope->globalObject());
Vector<JSONPData> JSONPData;
bool parseResult;
const String programSource = program->source().toString();
@@ -869,20 +953,16 @@ failedJSONP:
return checkedReturn(throwError(callFrame, error));
CodeBlock* codeBlock = &program->generatedBytecode();
- // Reserve stack space for this invocation:
- Register* oldEnd = m_stack.end();
- Register* newEnd = oldEnd + codeBlock->numParameters() + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
- if (!m_stack.grow(newEnd))
- return checkedReturn(throwStackOverflowError(callFrame));
-
// Push the call frame for this invocation:
- CallFrame* newCallFrame = CallFrame::create(oldEnd + codeBlock->numParameters() + JSStack::CallFrameHeaderSize);
ASSERT(codeBlock->numParameters() == 1); // 1 parameter for 'this'.
- newCallFrame->init(codeBlock, 0, scope, CallFrame::noCaller(), codeBlock->numParameters(), 0);
+ CallFrame* newCallFrame = m_stack.pushFrame(callFrame, codeBlock, scope, 1, 0);
+ if (UNLIKELY(!newCallFrame))
+ return checkedReturn(throwStackOverflowError(callFrame));
+
+ // Set the arguments for the callee:
newCallFrame->setThisValue(thisObj);
- TopCallFrameSetter topCallFrame(callFrame->globalData(), newCallFrame);
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->willExecute(callFrame, program->sourceURL(), program->lineNo());
// Execute the code:
@@ -890,213 +970,168 @@ failedJSONP:
{
SamplingTool::CallRecord callRecord(m_sampler.get());
- m_reentryDepth++;
#if ENABLE(LLINT_C_LOOP)
result = LLInt::CLoop::execute(newCallFrame, llint_program_prologue);
#elif ENABLE(JIT)
- result = program->generatedJITCode().execute(&m_stack, newCallFrame, scope->globalData());
+ result = program->generatedJITCode().execute(&m_stack, newCallFrame, &globalData);
#endif // ENABLE(JIT)
-
- m_reentryDepth--;
}
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->didExecute(callFrame, program->sourceURL(), program->lineNo());
- m_stack.shrink(oldEnd);
+ m_stack.popFrame(newCallFrame);
return checkedReturn(result);
}
JSValue Interpreter::executeCall(CallFrame* callFrame, JSObject* function, CallType callType, const CallData& callData, JSValue thisValue, const ArgList& args)
{
+ JSGlobalData& globalData = callFrame->globalData();
ASSERT(isValidThisObject(thisValue, callFrame));
ASSERT(!callFrame->hadException());
- ASSERT(!callFrame->globalData().isCollectorBusy());
- if (callFrame->globalData().isCollectorBusy())
+ ASSERT(!globalData.isCollectorBusy());
+ if (globalData.isCollectorBusy())
return jsNull();
StackStats::CheckPoint stackCheckPoint;
- if (m_reentryDepth >= MaxSmallThreadReentryDepth && m_reentryDepth >= callFrame->globalData().maxReentryDepth)
+ const StackBounds& nativeStack = wtfThreadData().stack();
+ StackPolicy policy(*this, nativeStack);
+ if (!nativeStack.isSafeToRecurse(policy.requiredCapacity()))
return checkedReturn(throwStackOverflowError(callFrame));
- Register* oldEnd = m_stack.end();
- ASSERT(callFrame->frameExtent() <= oldEnd || callFrame == callFrame->scope()->globalObject()->globalExec());
- int argCount = 1 + args.size(); // implicit "this" parameter
- size_t registerOffset = argCount + JSStack::CallFrameHeaderSize;
-
- CallFrame* newCallFrame = CallFrame::create(oldEnd + registerOffset);
- if (!m_stack.grow(newCallFrame->registers()))
- return checkedReturn(throwStackOverflowError(callFrame));
+ bool isJSCall = (callType == CallTypeJS);
+ JSScope* scope;
+ CodeBlock* newCodeBlock;
+ size_t argsCount = 1 + args.size(); // implicit "this" parameter
- newCallFrame->setThisValue(thisValue);
- for (size_t i = 0; i < args.size(); ++i)
- newCallFrame->setArgument(i, args.at(i));
-
- if (callType == CallTypeJS) {
- JSScope* callDataScope = callData.js.scope;
-
- DynamicGlobalObjectScope globalObjectScope(*callDataScope->globalData(), callDataScope->globalObject());
+ if (isJSCall)
+ scope = callData.js.scope;
+ else {
+ ASSERT(callType == CallTypeHost);
+ scope = callFrame->scope();
+ }
+ DynamicGlobalObjectScope globalObjectScope(globalData, scope->globalObject());
- JSObject* compileError = callData.js.functionExecutable->compileForCall(callFrame, callDataScope);
+ if (isJSCall) {
+ // Compile the callee:
+ JSObject* compileError = callData.js.functionExecutable->compileForCall(callFrame, scope);
if (UNLIKELY(!!compileError)) {
- m_stack.shrink(oldEnd);
return checkedReturn(throwError(callFrame, compileError));
}
+ newCodeBlock = &callData.js.functionExecutable->generatedBytecodeForCall();
+ ASSERT(!!newCodeBlock);
+ } else
+ newCodeBlock = 0;
- CodeBlock* newCodeBlock = &callData.js.functionExecutable->generatedBytecodeForCall();
- newCallFrame = slideRegisterWindowForCall(newCodeBlock, &m_stack, newCallFrame, 0, argCount);
- if (UNLIKELY(!newCallFrame)) {
- m_stack.shrink(oldEnd);
- return checkedReturn(throwStackOverflowError(callFrame));
- }
-
- newCallFrame->init(newCodeBlock, 0, callDataScope, callFrame->addHostCallFrameFlag(), argCount, function);
+ CallFrame* newCallFrame = m_stack.pushFrame(callFrame, newCodeBlock, scope, argsCount, function);
+ if (UNLIKELY(!newCallFrame))
+ return checkedReturn(throwStackOverflowError(callFrame));
- TopCallFrameSetter topCallFrame(callFrame->globalData(), newCallFrame);
+ // Set the arguments for the callee:
+ newCallFrame->setThisValue(thisValue);
+ for (size_t i = 0; i < args.size(); ++i)
+ newCallFrame->setArgument(i, args.at(i));
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
- profiler->willExecute(callFrame, function);
+ if (Profiler* profiler = globalData.enabledProfiler())
+ profiler->willExecute(callFrame, function);
- JSValue result;
- {
- SamplingTool::CallRecord callRecord(m_sampler.get());
+ JSValue result;
+ {
+ SamplingTool::CallRecord callRecord(m_sampler.get(), !isJSCall);
- m_reentryDepth++;
+ // Execute the code:
+ if (isJSCall) {
#if ENABLE(LLINT_C_LOOP)
result = LLInt::CLoop::execute(newCallFrame, llint_function_for_call_prologue);
#elif ENABLE(JIT)
- result = callData.js.functionExecutable->generatedJITCodeForCall().execute(&m_stack, newCallFrame, callDataScope->globalData());
+ result = callData.js.functionExecutable->generatedJITCodeForCall().execute(&m_stack, newCallFrame, &globalData);
#endif // ENABLE(JIT)
-
- m_reentryDepth--;
- }
-
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
- profiler->didExecute(callFrame, function);
-
- m_stack.shrink(oldEnd);
- return checkedReturn(result);
- }
-
- ASSERT(callType == CallTypeHost);
- JSScope* scope = callFrame->scope();
- newCallFrame->init(0, 0, scope, callFrame->addHostCallFrameFlag(), argCount, function);
-
- TopCallFrameSetter topCallFrame(callFrame->globalData(), newCallFrame);
-
- DynamicGlobalObjectScope globalObjectScope(*scope->globalData(), scope->globalObject());
-
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
- profiler->willExecute(callFrame, function);
-
- JSValue result;
- {
- SamplingTool::HostCallRecord callRecord(m_sampler.get());
- result = JSValue::decode(callData.native.function(newCallFrame));
+ } else
+ result = JSValue::decode(callData.native.function(newCallFrame));
}
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->didExecute(callFrame, function);
- m_stack.shrink(oldEnd);
+ m_stack.popFrame(newCallFrame);
return checkedReturn(result);
}
JSObject* Interpreter::executeConstruct(CallFrame* callFrame, JSObject* constructor, ConstructType constructType, const ConstructData& constructData, const ArgList& args)
{
+ JSGlobalData& globalData = callFrame->globalData();
ASSERT(!callFrame->hadException());
- ASSERT(!callFrame->globalData().isCollectorBusy());
+ ASSERT(!globalData.isCollectorBusy());
// We throw in this case because we have to return something "valid" but we're
// already in an invalid state.
- if (callFrame->globalData().isCollectorBusy())
+ if (globalData.isCollectorBusy())
return checkedReturn(throwStackOverflowError(callFrame));
StackStats::CheckPoint stackCheckPoint;
- if (m_reentryDepth >= MaxSmallThreadReentryDepth && m_reentryDepth >= callFrame->globalData().maxReentryDepth)
- return checkedReturn(throwStackOverflowError(callFrame));
-
- Register* oldEnd = m_stack.end();
- int argCount = 1 + args.size(); // implicit "this" parameter
- size_t registerOffset = argCount + JSStack::CallFrameHeaderSize;
-
- if (!m_stack.grow(oldEnd + registerOffset))
+ const StackBounds& nativeStack = wtfThreadData().stack();
+ StackPolicy policy(*this, nativeStack);
+ if (!nativeStack.isSafeToRecurse(policy.requiredCapacity()))
return checkedReturn(throwStackOverflowError(callFrame));
- CallFrame* newCallFrame = CallFrame::create(oldEnd + registerOffset);
- newCallFrame->setThisValue(jsUndefined());
- for (size_t i = 0; i < args.size(); ++i)
- newCallFrame->setArgument(i, args.at(i));
+ bool isJSConstruct = (constructType == ConstructTypeJS);
+ JSScope* scope;
+ CodeBlock* newCodeBlock;
+ size_t argsCount = 1 + args.size(); // implicit "this" parameter
- if (constructType == ConstructTypeJS) {
- JSScope* constructDataScope = constructData.js.scope;
+ if (isJSConstruct)
+ scope = constructData.js.scope;
+ else {
+ ASSERT(constructType == ConstructTypeHost);
+ scope = callFrame->scope();
+ }
- DynamicGlobalObjectScope globalObjectScope(*constructDataScope->globalData(), constructDataScope->globalObject());
+ DynamicGlobalObjectScope globalObjectScope(globalData, scope->globalObject());
- JSObject* compileError = constructData.js.functionExecutable->compileForConstruct(callFrame, constructDataScope);
+ if (isJSConstruct) {
+ // Compile the callee:
+ JSObject* compileError = constructData.js.functionExecutable->compileForConstruct(callFrame, scope);
if (UNLIKELY(!!compileError)) {
- m_stack.shrink(oldEnd);
return checkedReturn(throwError(callFrame, compileError));
}
+ newCodeBlock = &constructData.js.functionExecutable->generatedBytecodeForConstruct();
+ ASSERT(!!newCodeBlock);
+ } else
+ newCodeBlock = 0;
- CodeBlock* newCodeBlock = &constructData.js.functionExecutable->generatedBytecodeForConstruct();
- newCallFrame = slideRegisterWindowForCall(newCodeBlock, &m_stack, newCallFrame, 0, argCount);
- if (UNLIKELY(!newCallFrame)) {
- m_stack.shrink(oldEnd);
- return checkedReturn(throwStackOverflowError(callFrame));
- }
-
- newCallFrame->init(newCodeBlock, 0, constructDataScope, callFrame->addHostCallFrameFlag(), argCount, constructor);
+ CallFrame* newCallFrame = m_stack.pushFrame(callFrame, newCodeBlock, scope, argsCount, constructor);
+ if (UNLIKELY(!newCallFrame))
+ return checkedReturn(throwStackOverflowError(callFrame));
- TopCallFrameSetter topCallFrame(callFrame->globalData(), newCallFrame);
+ // Set the arguments for the callee:
+ newCallFrame->setThisValue(jsUndefined());
+ for (size_t i = 0; i < args.size(); ++i)
+ newCallFrame->setArgument(i, args.at(i));
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
- profiler->willExecute(callFrame, constructor);
+ if (Profiler* profiler = globalData.enabledProfiler())
+ profiler->willExecute(callFrame, constructor);
- JSValue result;
- {
- SamplingTool::CallRecord callRecord(m_sampler.get());
+ JSValue result;
+ {
+ SamplingTool::CallRecord callRecord(m_sampler.get(), !isJSConstruct);
- m_reentryDepth++;
+ // Execute the code.
+ if (isJSConstruct) {
#if ENABLE(LLINT_C_LOOP)
result = LLInt::CLoop::execute(newCallFrame, llint_function_for_construct_prologue);
#elif ENABLE(JIT)
- result = constructData.js.functionExecutable->generatedJITCodeForConstruct().execute(&m_stack, newCallFrame, constructDataScope->globalData());
+ result = constructData.js.functionExecutable->generatedJITCodeForConstruct().execute(&m_stack, newCallFrame, &globalData);
#endif // ENABLE(JIT)
- m_reentryDepth--;
+ } else {
+ result = JSValue::decode(constructData.native.function(newCallFrame));
}
-
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
- profiler->didExecute(callFrame, constructor);
-
- m_stack.shrink(oldEnd);
- if (callFrame->hadException())
- return 0;
- ASSERT(result.isObject());
- return checkedReturn(asObject(result));
- }
-
- ASSERT(constructType == ConstructTypeHost);
- JSScope* scope = callFrame->scope();
- newCallFrame->init(0, 0, scope, callFrame->addHostCallFrameFlag(), argCount, constructor);
-
- TopCallFrameSetter topCallFrame(callFrame->globalData(), newCallFrame);
-
- DynamicGlobalObjectScope globalObjectScope(*scope->globalData(), scope->globalObject());
-
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
- profiler->willExecute(callFrame, constructor);
-
- JSValue result;
- {
- SamplingTool::HostCallRecord callRecord(m_sampler.get());
- result = JSValue::decode(constructData.native.function(newCallFrame));
}
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->didExecute(callFrame, constructor);
- m_stack.shrink(oldEnd);
+ m_stack.popFrame(newCallFrame);
+
if (callFrame->hadException())
return 0;
ASSERT(result.isObject());
@@ -1105,101 +1140,118 @@ JSObject* Interpreter::executeConstruct(CallFrame* callFrame, JSObject* construc
CallFrameClosure Interpreter::prepareForRepeatCall(FunctionExecutable* functionExecutable, CallFrame* callFrame, JSFunction* function, int argumentCountIncludingThis, JSScope* scope)
{
- ASSERT(!scope->globalData()->exception);
+ JSGlobalData& globalData = *scope->globalData();
+ ASSERT(!globalData.exception);
- if (callFrame->globalData().isCollectorBusy())
+ if (globalData.isCollectorBusy())
return CallFrameClosure();
StackStats::CheckPoint stackCheckPoint;
- if (m_reentryDepth >= MaxSmallThreadReentryDepth && m_reentryDepth >= callFrame->globalData().maxReentryDepth) {
- throwStackOverflowError(callFrame);
- return CallFrameClosure();
- }
-
- Register* oldEnd = m_stack.end();
- size_t registerOffset = argumentCountIncludingThis + JSStack::CallFrameHeaderSize;
-
- CallFrame* newCallFrame = CallFrame::create(oldEnd + registerOffset);
- if (!m_stack.grow(newCallFrame->registers())) {
+ const StackBounds& nativeStack = wtfThreadData().stack();
+ StackPolicy policy(*this, nativeStack);
+ if (!nativeStack.isSafeToRecurse(policy.requiredCapacity())) {
throwStackOverflowError(callFrame);
return CallFrameClosure();
}
+ // Compile the callee:
JSObject* error = functionExecutable->compileForCall(callFrame, scope);
if (error) {
throwError(callFrame, error);
- m_stack.shrink(oldEnd);
return CallFrameClosure();
}
- CodeBlock* codeBlock = &functionExecutable->generatedBytecodeForCall();
+ CodeBlock* newCodeBlock = &functionExecutable->generatedBytecodeForCall();
- newCallFrame = slideRegisterWindowForCall(codeBlock, &m_stack, newCallFrame, 0, argumentCountIncludingThis);
+ size_t argsCount = argumentCountIncludingThis;
+
+ CallFrame* newCallFrame = m_stack.pushFrame(callFrame, newCodeBlock, scope, argsCount, function);
if (UNLIKELY(!newCallFrame)) {
throwStackOverflowError(callFrame);
- m_stack.shrink(oldEnd);
return CallFrameClosure();
}
- newCallFrame->init(codeBlock, 0, scope, callFrame->addHostCallFrameFlag(), argumentCountIncludingThis, function);
- scope->globalData()->topCallFrame = newCallFrame;
- CallFrameClosure result = { callFrame, newCallFrame, function, functionExecutable, scope->globalData(), oldEnd, scope, codeBlock->numParameters(), argumentCountIncludingThis };
+
+ if (UNLIKELY(!newCallFrame)) {
+ throwStackOverflowError(callFrame);
+ return CallFrameClosure();
+ }
+
+ // Return the successful closure:
+ CallFrameClosure result = { callFrame, newCallFrame, function, functionExecutable, &globalData, scope, newCodeBlock->numParameters(), argumentCountIncludingThis };
return result;
}
JSValue Interpreter::execute(CallFrameClosure& closure)
{
+ JSGlobalData& globalData = *closure.globalData;
SamplingScope samplingScope(this);
- ASSERT(!closure.oldCallFrame->globalData().isCollectorBusy());
- if (closure.oldCallFrame->globalData().isCollectorBusy())
+ ASSERT(!globalData.isCollectorBusy());
+ if (globalData.isCollectorBusy())
return jsNull();
StackStats::CheckPoint stackCheckPoint;
+ m_stack.validateFence(closure.newCallFrame, "BEFORE");
closure.resetCallFrame();
- if (Profiler* profiler = closure.oldCallFrame->globalData().enabledProfiler())
+ m_stack.validateFence(closure.newCallFrame, "STEP 1");
+
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->willExecute(closure.oldCallFrame, closure.function);
- TopCallFrameSetter topCallFrame(*closure.globalData, closure.newCallFrame);
+ // The code execution below may push more frames and point the topCallFrame
+ // to those newer frames, or it may pop to the top frame to the caller of
+ // the current repeat frame, or it may leave the top frame pointing to the
+ // current repeat frame.
+ //
+ // Hence, we need to preserve the topCallFrame here ourselves before
+ // repeating this call on a second callback function.
+ TopCallFrameSetter topCallFrame(globalData, closure.newCallFrame);
+
+ // Execute the code:
JSValue result;
{
SamplingTool::CallRecord callRecord(m_sampler.get());
- m_reentryDepth++;
#if ENABLE(LLINT_C_LOOP)
result = LLInt::CLoop::execute(closure.newCallFrame, llint_function_for_call_prologue);
#elif ENABLE(JIT)
- result = closure.functionExecutable->generatedJITCodeForCall().execute(&m_stack, closure.newCallFrame, closure.globalData);
+ result = closure.functionExecutable->generatedJITCodeForCall().execute(&m_stack, closure.newCallFrame, &globalData);
#endif // ENABLE(JIT)
- m_reentryDepth--;
}
- if (Profiler* profiler = closure.oldCallFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->didExecute(closure.oldCallFrame, closure.function);
+
+ m_stack.validateFence(closure.newCallFrame, "AFTER");
return checkedReturn(result);
}
void Interpreter::endRepeatCall(CallFrameClosure& closure)
{
- closure.globalData->topCallFrame = closure.oldCallFrame;
- m_stack.shrink(closure.oldEnd);
+ m_stack.popFrame(closure.newCallFrame);
}
-JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue thisValue, JSScope* scope, int globalRegisterOffset)
+JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue thisValue, JSScope* scope)
{
+ JSGlobalData& globalData = *scope->globalData();
SamplingScope samplingScope(this);
+ ASSERT(scope->globalData() == &callFrame->globalData());
ASSERT(isValidThisObject(thisValue, callFrame));
- ASSERT(!scope->globalData()->exception);
- ASSERT(!callFrame->globalData().isCollectorBusy());
- if (callFrame->globalData().isCollectorBusy())
+ ASSERT(!globalData.exception);
+ ASSERT(!globalData.isCollectorBusy());
+ if (globalData.isCollectorBusy())
return jsNull();
- DynamicGlobalObjectScope globalObjectScope(*scope->globalData(), scope->globalObject());
+ DynamicGlobalObjectScope globalObjectScope(globalData, scope->globalObject());
StackStats::CheckPoint stackCheckPoint;
- if (m_reentryDepth >= MaxSmallThreadReentryDepth && m_reentryDepth >= callFrame->globalData().maxReentryDepth)
+ const StackBounds& nativeStack = wtfThreadData().stack();
+ StackPolicy policy(*this, nativeStack);
+ if (!nativeStack.isSafeToRecurse(policy.requiredCapacity()))
return checkedReturn(throwStackOverflowError(callFrame));
+ // Compile the callee:
JSObject* compileError = eval->compile(callFrame, scope);
if (UNLIKELY(!!compileError))
return checkedReturn(throwError(callFrame, compileError));
@@ -1222,7 +1274,7 @@ JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue
variableObject = scope;
}
// Scope for BatchedTransitionOptimizer
- BatchedTransitionOptimizer optimizer(callFrame->globalData(), variableObject);
+ BatchedTransitionOptimizer optimizer(globalData, variableObject);
for (unsigned i = 0; i < numVariables; ++i) {
const Identifier& ident = codeBlock->variable(i);
@@ -1239,40 +1291,34 @@ JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue
}
}
- Register* oldEnd = m_stack.end();
- Register* newEnd = m_stack.begin() + globalRegisterOffset + codeBlock->m_numCalleeRegisters;
- if (!m_stack.grow(newEnd))
+ // Push the frame:
+ ASSERT(codeBlock->numParameters() == 1); // 1 parameter for 'this'.
+ CallFrame* newCallFrame = m_stack.pushFrame(callFrame, codeBlock, scope, 1, 0);
+ if (UNLIKELY(!newCallFrame))
return checkedReturn(throwStackOverflowError(callFrame));
- CallFrame* newCallFrame = CallFrame::create(m_stack.begin() + globalRegisterOffset);
-
- ASSERT(codeBlock->numParameters() == 1); // 1 parameter for 'this'.
- newCallFrame->init(codeBlock, 0, scope, callFrame->addHostCallFrameFlag(), codeBlock->numParameters(), 0);
+ // Set the arguments for the callee:
newCallFrame->setThisValue(thisValue);
- TopCallFrameSetter topCallFrame(callFrame->globalData(), newCallFrame);
-
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->willExecute(callFrame, eval->sourceURL(), eval->lineNo());
+ // Execute the code:
JSValue result;
{
SamplingTool::CallRecord callRecord(m_sampler.get());
-
- m_reentryDepth++;
#if ENABLE(LLINT_C_LOOP)
result = LLInt::CLoop::execute(newCallFrame, llint_eval_prologue);
#elif ENABLE(JIT)
- result = eval->generatedJITCode().execute(&m_stack, newCallFrame, scope->globalData());
+ result = eval->generatedJITCode().execute(&m_stack, newCallFrame, &globalData);
#endif // ENABLE(JIT)
- m_reentryDepth--;
}
- if (Profiler* profiler = callFrame->globalData().enabledProfiler())
+ if (Profiler* profiler = globalData.enabledProfiler())
profiler->didExecute(callFrame, eval->sourceURL(), eval->lineNo());
- m_stack.shrink(oldEnd);
+ m_stack.popFrame(newCallFrame);
return checkedReturn(result);
}
diff --git a/Source/JavaScriptCore/interpreter/Interpreter.h b/Source/JavaScriptCore/interpreter/Interpreter.h
index 11c6f078a..c3bca1ad7 100644
--- a/Source/JavaScriptCore/interpreter/Interpreter.h
+++ b/Source/JavaScriptCore/interpreter/Interpreter.h
@@ -48,6 +48,7 @@ namespace JSC {
class EvalExecutable;
class ExecutableBase;
class FunctionExecutable;
+ class JSGlobalData;
class JSGlobalObject;
class LLIntOffsetsExtractor;
class ProgramExecutable;
@@ -170,21 +171,22 @@ namespace JSC {
}
};
- // We use a smaller reentrancy limit on iPhone because of the high amount of
- // stack space required on the web thread.
-#if PLATFORM(IOS)
- enum { MaxLargeThreadReentryDepth = 64, MaxSmallThreadReentryDepth = 16 };
-#else
- enum { MaxLargeThreadReentryDepth = 256, MaxSmallThreadReentryDepth = 16 };
-#endif // PLATFORM(IOS)
-
class Interpreter {
WTF_MAKE_FAST_ALLOCATED;
friend class CachedCall;
friend class LLIntOffsetsExtractor;
friend class JIT;
+
public:
- Interpreter();
+ class ErrorHandlingMode {
+ public:
+ JS_EXPORT_PRIVATE ErrorHandlingMode(ExecState*);
+ JS_EXPORT_PRIVATE ~ErrorHandlingMode();
+ private:
+ Interpreter& m_interpreter;
+ };
+
+ Interpreter(JSGlobalData &);
~Interpreter();
void initialize(bool canUseJIT);
@@ -218,7 +220,6 @@ namespace JSC {
JSValue executeCall(CallFrame*, JSObject* function, CallType, const CallData&, JSValue thisValue, const ArgList&);
JSObject* executeConstruct(CallFrame*, JSObject* function, ConstructType, const ConstructData&, const ArgList&);
JSValue execute(EvalExecutable*, CallFrame*, JSValue thisValue, JSScope*);
- JSValue execute(EvalExecutable*, CallFrame*, JSValue thisValue, JSScope*, int globalRegisterOffset);
JSValue retrieveArgumentsFromVMCode(CallFrame*, JSFunction*) const;
JSValue retrieveCallerFromVMCode(CallFrame*, JSFunction*) const;
@@ -241,6 +242,16 @@ namespace JSC {
JS_EXPORT_PRIVATE void dumpCallFrame(CallFrame*);
private:
+ class StackPolicy {
+ public:
+ StackPolicy(Interpreter&, const StackBounds&);
+ inline size_t requiredCapacity() { return m_requiredCapacity; }
+
+ private:
+ Interpreter& m_interpreter;
+ size_t m_requiredCapacity;
+ };
+
enum ExecutionFlag { Normal, InitializeAndReturn };
CallFrameClosure prepareForRepeatCall(FunctionExecutable*, CallFrame*, JSFunction*, int argumentCountIncludingThis, JSScope*);
@@ -249,8 +260,6 @@ namespace JSC {
NEVER_INLINE bool unwindCallFrame(CallFrame*&, JSValue, unsigned& bytecodeOffset, CodeBlock*&);
- static ALWAYS_INLINE CallFrame* slideRegisterWindowForCall(CodeBlock*, JSStack*, CallFrame*, size_t registerOffset, int argc);
-
static CallFrame* findFunctionCallFrameFromVMCode(CallFrame*, JSFunction*);
void dumpRegisters(CallFrame*);
@@ -261,9 +270,8 @@ namespace JSC {
int m_sampleEntryDepth;
OwnPtr<SamplingTool> m_sampler;
- int m_reentryDepth;
-
JSStack m_stack;
+ int m_errorHandlingModeReentry;
#if ENABLE(COMPUTED_GOTO_OPCODES) && ENABLE(LLINT)
Opcode* m_opcodeTable; // Maps OpcodeID => Opcode for compiling
@@ -281,11 +289,6 @@ namespace JSC {
return !thisValue.isObject() || thisValue.toThisObject(exec) == thisValue;
}
- inline JSValue Interpreter::execute(EvalExecutable* eval, CallFrame* callFrame, JSValue thisValue, JSScope* scope)
- {
- return execute(eval, callFrame, thisValue, scope, m_stack.size() + 1 + JSStack::CallFrameHeaderSize);
- }
-
JSValue eval(CallFrame*);
CallFrame* loadVarargs(CallFrame*, JSStack*, JSValue thisValue, JSValue arguments, int firstFreeRegister);
diff --git a/Source/JavaScriptCore/interpreter/JSStack.cpp b/Source/JavaScriptCore/interpreter/JSStack.cpp
index 5dd708a48..f5f9e3763 100644
--- a/Source/JavaScriptCore/interpreter/JSStack.cpp
+++ b/Source/JavaScriptCore/interpreter/JSStack.cpp
@@ -28,6 +28,7 @@
#include "config.h"
#include "JSStack.h"
+#include "JSStackInlines.h"
#include "ConservativeRoots.h"
#include "Interpreter.h"
@@ -41,7 +42,22 @@ static Mutex& stackStatisticsMutex()
DEFINE_STATIC_LOCAL(Mutex, staticMutex, ());
return staticMutex;
}
-
+
+JSStack::JSStack(JSGlobalData& globalData, size_t capacity)
+ : m_end(0)
+ , m_topCallFrame(globalData.topCallFrame)
+{
+ ASSERT(capacity && isPageAligned(capacity));
+
+ m_reservation = PageReservation::reserve(roundUpAllocationSize(capacity * sizeof(Register), commitSize), OSAllocator::JSVMStackPages);
+ m_end = static_cast<Register*>(m_reservation.base());
+ m_commitEnd = static_cast<Register*>(m_reservation.base());
+
+ disableErrorStackReserve();
+
+ m_topCallFrame = 0;
+}
+
JSStack::~JSStack()
{
void* base = m_reservation.base();
@@ -52,15 +68,22 @@ JSStack::~JSStack()
bool JSStack::growSlowCase(Register* newEnd)
{
+ // If we have already committed enough memory to satisfy this request,
+ // just update the end pointer and return.
if (newEnd <= m_commitEnd) {
m_end = newEnd;
return true;
}
+ // Compute the chunk size of additional memory to commit, and see if it
+ // is still within our budget. If not, we'll fail to grow and
+ // return false.
long delta = roundUpAllocationSize(reinterpret_cast<char*>(newEnd) - reinterpret_cast<char*>(m_commitEnd), commitSize);
- if (reinterpret_cast<char*>(m_commitEnd) + delta > static_cast<char*>(m_reservation.base()) + m_reservation.size())
+ if (reinterpret_cast<char*>(m_commitEnd) + delta > reinterpret_cast<char*>(m_useableEnd))
return false;
+ // Otherwise, the growth is still within our budget. Go ahead and commit
+ // it and return true.
m_reservation.commit(m_commitEnd, delta);
addToCommittedByteCount(delta);
m_commitEnd = reinterpret_cast_ptr<Register*>(reinterpret_cast<char*>(m_commitEnd) + delta);
@@ -70,12 +93,12 @@ bool JSStack::growSlowCase(Register* newEnd)
void JSStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
- conservativeRoots.add(begin(), end());
+ conservativeRoots.add(begin(), getTopOfStack());
}
void JSStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots, JITStubRoutineSet& jitStubRoutines, DFGCodeBlocks& dfgCodeBlocks)
{
- conservativeRoots.add(begin(), end(), jitStubRoutines, dfgCodeBlocks);
+ conservativeRoots.add(begin(), getTopOfStack(), jitStubRoutines, dfgCodeBlocks);
}
void JSStack::releaseExcessCapacity()
@@ -104,4 +127,25 @@ void JSStack::addToCommittedByteCount(long byteCount)
committedBytesCount += byteCount;
}
+void JSStack::enableErrorStackReserve()
+{
+ m_useableEnd = reservationEnd();
+}
+
+void JSStack::disableErrorStackReserve()
+{
+ char* useableEnd = reinterpret_cast<char*>(reservationEnd()) - commitSize;
+ m_useableEnd = reinterpret_cast<Register*>(useableEnd);
+
+ // By the time we get here, we are guaranteed to be destructing the last
+ // Interpreter::ErrorHandlingMode that enabled this reserve in the first
+ // place. That means the stack space beyond m_useableEnd before we
+ // enabled the reserve was not previously in use. Hence, it is safe to
+ // shrink back to that m_useableEnd.
+ if (m_end > m_useableEnd) {
+ ASSERT(m_topCallFrame->frameExtent() <= m_useableEnd);
+ shrink(m_useableEnd);
+ }
+}
+
} // namespace JSC
diff --git a/Source/JavaScriptCore/interpreter/JSStack.h b/Source/JavaScriptCore/interpreter/JSStack.h
index 86fa40be7..3beb59ebf 100644
--- a/Source/JavaScriptCore/interpreter/JSStack.h
+++ b/Source/JavaScriptCore/interpreter/JSStack.h
@@ -35,11 +35,17 @@
#include <wtf/PageReservation.h>
#include <wtf/VMTags.h>
+#if !defined(NDEBUG) && !defined(ENABLE_DEBUG_JSSTACK)
+#define ENABLE_DEBUG_JSSTACK 1
+#endif
+
namespace JSC {
class ConservativeRoots;
class DFGCodeBlocks;
+ class ExecState;
class JITStubRoutineSet;
+ class JSGlobalData;
class LLIntOffsetsExtractor;
class JSStack {
@@ -61,7 +67,7 @@ namespace JSC {
// Allow 8k of excess registers before we start trying to reap the stack
static const ptrdiff_t maxExcessCapacity = 8 * 1024;
- JSStack(size_t capacity = defaultCapacity);
+ JSStack(JSGlobalData&, size_t capacity = defaultCapacity);
~JSStack();
void gatherConservativeRoots(ConservativeRoots&);
@@ -72,7 +78,6 @@ namespace JSC {
size_t size() const { return end() - begin(); }
bool grow(Register*);
- void shrink(Register*);
static size_t committedByteCount();
static void initializeThreading();
@@ -82,26 +87,55 @@ namespace JSC {
return &m_end;
}
+ Register* getTopOfFrame(CallFrame*);
+ Register* getStartOfFrame(CallFrame*);
+ Register* getTopOfStack();
+
+ CallFrame* pushFrame(CallFrame* callerFrame, class CodeBlock*,
+ JSScope*, int argsCount, JSObject* callee);
+
+ void popFrame(CallFrame*);
+
+ void enableErrorStackReserve();
+ void disableErrorStackReserve();
+
+#if ENABLE(DEBUG_JSSTACK)
+ void installFence(CallFrame*, const char *function = "", int lineNo = 0);
+ void validateFence(CallFrame*, const char *function = "", int lineNo = 0);
+ static const int FenceSize = 4;
+#else // !ENABLE(DEBUG_JSSTACK)
+ void installFence(CallFrame*, const char* = "", int = 0) { }
+ void validateFence(CallFrame*, const char* = "", int = 0) { }
+#endif // !ENABLE(DEBUG_JSSTACK)
+
private:
- friend class LLIntOffsetsExtractor;
+ Register* reservationEnd() const
+ {
+ char* base = static_cast<char*>(m_reservation.base());
+ char* reservationEnd = base + m_reservation.size();
+ return reinterpret_cast<Register*>(reservationEnd);
+ }
+
+#if ENABLE(DEBUG_JSSTACK)
+ static JSValue generateFenceValue(size_t argIndex);
+ void installTrapsAfterFrame(CallFrame*);
+#else
+ void installTrapsAfterFrame(CallFrame*) { }
+#endif
bool growSlowCase(Register*);
+ void shrink(Register*);
void releaseExcessCapacity();
void addToCommittedByteCount(long);
+
Register* m_end;
Register* m_commitEnd;
+ Register* m_useableEnd;
PageReservation m_reservation;
- };
+ CallFrame*& m_topCallFrame;
- inline JSStack::JSStack(size_t capacity)
- : m_end(0)
- {
- ASSERT(capacity && isPageAligned(capacity));
-
- m_reservation = PageReservation::reserve(roundUpAllocationSize(capacity * sizeof(Register), commitSize), OSAllocator::JSVMStackPages);
- m_end = static_cast<Register*>(m_reservation.base());
- m_commitEnd = static_cast<Register*>(m_reservation.base());
- }
+ friend class LLIntOffsetsExtractor;
+ };
inline void JSStack::shrink(Register* newEnd)
{
diff --git a/Source/JavaScriptCore/interpreter/JSStackInlines.h b/Source/JavaScriptCore/interpreter/JSStackInlines.h
new file mode 100644
index 000000000..25b7dcf5a
--- /dev/null
+++ b/Source/JavaScriptCore/interpreter/JSStackInlines.h
@@ -0,0 +1,232 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSStackInlines_h
+#define JSStackInlines_h
+
+#include "CallFrame.h"
+#include "CodeBlock.h"
+#include "JSStack.h"
+#include <wtf/UnusedParam.h>
+
+namespace JSC {
+
+inline Register* JSStack::getTopOfFrame(CallFrame* frame)
+{
+ if (UNLIKELY(!frame))
+ return begin();
+ return frame->frameExtent();
+}
+
+inline Register* JSStack::getTopOfStack()
+{
+ return getTopOfFrame(m_topCallFrame);
+}
+
+inline Register* JSStack::getStartOfFrame(CallFrame* frame)
+{
+ CallFrame* callerFrame = frame->callerFrameNoFlags();
+ return getTopOfFrame(callerFrame);
+}
+
+inline CallFrame* JSStack::pushFrame(CallFrame* callerFrame,
+ class CodeBlock* codeBlock, JSScope* scope, int argsCount, JSObject* callee)
+{
+ ASSERT(!!scope);
+ Register* oldEnd = getTopOfStack();
+
+ // Ensure that we have enough space for the parameters:
+ size_t paddedArgsCount = argsCount;
+ if (codeBlock) {
+ size_t numParameters = codeBlock->numParameters();
+ if (paddedArgsCount < numParameters)
+ paddedArgsCount = numParameters;
+ }
+
+ Register* newCallFrameSlot = oldEnd + paddedArgsCount + JSStack::CallFrameHeaderSize;
+#if ENABLE(DEBUG_JSSTACK)
+ newCallFrameSlot += JSStack::FenceSize;
+#endif
+ Register* newEnd = newCallFrameSlot;
+ if (!!codeBlock)
+ newEnd += codeBlock->m_numCalleeRegisters;
+
+ // Ensure that we have the needed stack capacity to push the new frame:
+ if (!grow(newEnd))
+ return 0;
+
+ // Compute the address of the new frame for this invocation:
+ CallFrame* newCallFrame = CallFrame::create(newCallFrameSlot);
+ ASSERT(!!newCallFrame);
+
+ // The caller frame should always be the real previous frame on the stack,
+ // and not a potential GlobalExec that was passed in. Point callerFrame to
+ // the top frame on the stack.
+ callerFrame = m_topCallFrame;
+
+ // Initialize the frame header:
+ newCallFrame->init(codeBlock, 0, scope,
+ callerFrame->addHostCallFrameFlag(), argsCount, callee);
+
+ ASSERT(!!newCallFrame->scope());
+
+ // Pad additional args if needed:
+ // Note: we need to subtract 1 from argsCount and paddedArgsCount to
+ // exclude the this pointer.
+ for (size_t i = argsCount-1; i < paddedArgsCount-1; ++i)
+ newCallFrame->setArgument(i, jsUndefined());
+
+ installFence(newCallFrame, __FUNCTION__, __LINE__);
+ validateFence(newCallFrame, __FUNCTION__, __LINE__);
+ installTrapsAfterFrame(newCallFrame);
+
+ // Push the new frame:
+ m_topCallFrame = newCallFrame;
+
+ return newCallFrame;
+}
+
+inline void JSStack::popFrame(CallFrame* frame)
+{
+ validateFence(frame, __FUNCTION__, __LINE__);
+ CallFrame* callerFrame = frame->callerFrameNoFlags();
+
+ // Pop to the caller:
+ m_topCallFrame = callerFrame;
+
+ // If we are popping the very first frame from the stack, i.e. there are no
+ // more frames before it, then we can now safely shrink the stack. In
+ // this case, we're shrinking all the way to the beginning since there
+ // are no more frames on the stack.
+ if (!callerFrame)
+ shrink(begin());
+
+ installTrapsAfterFrame(callerFrame);
+}
+
+
+#if ENABLE(DEBUG_JSSTACK)
+inline JSValue JSStack::generateFenceValue(size_t argIndex)
+{
+ unsigned fenceBits = 0xfacebad0 | ((argIndex+1) & 0xf);
+ JSValue fenceValue = JSValue(fenceBits);
+ return fenceValue;
+}
+
+// The JSStack fences mechanism works as follows:
+// 1. A fence is a number (JSStack::FenceSize) of JSValues that are initialized
+// with values generated by JSStack::generateFenceValue().
+// 2. When pushFrame() is called, the fence is installed after the max extent
+// of the previous topCallFrame and the last arg of the new frame:
+//
+// | ... |
+// |--------------------------------------|
+// | Frame Header of previous frame |
+// |--------------------------------------|
+// topCallFrame --> | |
+// | Locals of previous frame |
+// |--------------------------------------|
+// | *** the Fence *** |
+// |--------------------------------------|
+// | Args of new frame |
+// |--------------------------------------|
+// | Frame Header of new frame |
+// |--------------------------------------|
+// frame --> | Locals of new frame |
+// | |
+//
+// 3. In popFrame() and elsewhere, we can call JSStack::validateFence() to
+// assert that the fence contains the values we expect.
+
+inline void JSStack::installFence(CallFrame* frame, const char *function, int lineNo)
+{
+ UNUSED_PARAM(function);
+ UNUSED_PARAM(lineNo);
+ Register* startOfFrame = getStartOfFrame(frame);
+
+ // The last argIndex is at:
+ size_t maxIndex = frame->argIndexForRegister(startOfFrame) + 1;
+ size_t startIndex = maxIndex - FenceSize;
+ for (size_t i = startIndex; i < maxIndex; ++i) {
+ JSValue fenceValue = generateFenceValue(i);
+ frame->setArgument(i, fenceValue);
+ }
+}
+
+inline void JSStack::validateFence(CallFrame* frame, const char *function, int lineNo)
+{
+ UNUSED_PARAM(function);
+ UNUSED_PARAM(lineNo);
+ ASSERT(!!frame->scope());
+ Register* startOfFrame = getStartOfFrame(frame);
+ size_t maxIndex = frame->argIndexForRegister(startOfFrame) + 1;
+ size_t startIndex = maxIndex - FenceSize;
+ for (size_t i = startIndex; i < maxIndex; ++i) {
+ JSValue fenceValue = generateFenceValue(i);
+ JSValue actualValue = frame->getArgumentUnsafe(i);
+ ASSERT(fenceValue == actualValue);
+ }
+}
+
+// When debugging the JSStack, we install bad values after the extent of the
+// topCallFrame at the end of pushFrame() and popFrame(). The intention is
+// to trigger crashes in the event that memory in this supposedly unused
+// region is read and consumed without proper initialization. After the trap
+// words are installed, the stack looks like this:
+//
+// | ... |
+// |-----------------------------|
+// | Frame Header of frame |
+// |-----------------------------|
+// topCallFrame --> | |
+// | Locals of frame |
+// |-----------------------------|
+// | *** Trap words *** |
+// |-----------------------------|
+// | Unused space ... |
+// | ... |
+
+inline void JSStack::installTrapsAfterFrame(CallFrame* frame)
+{
+ Register* topOfFrame = getTopOfFrame(frame);
+ const int sizeOfTrap = 64;
+ int32_t* startOfTrap = reinterpret_cast<int32_t*>(topOfFrame);
+ int32_t* endOfTrap = startOfTrap + sizeOfTrap;
+ int32_t* endOfCommitedMemory = reinterpret_cast<int32_t*>(m_commitEnd);
+
+ // Make sure we're not exceeding the amount of available memory to write to:
+ if (endOfTrap > endOfCommitedMemory)
+ endOfTrap = endOfCommitedMemory;
+
+ // Lay the traps:
+ int32_t* p = startOfTrap;
+ while (p < endOfTrap)
+ *p++ = 0xabadcafe; // A bad word to trigger a crash if deref'ed.
+}
+#endif // ENABLE(DEBUG_JSSTACK)
+
+} // namespace JSC
+
+#endif // JSStackInlines_h