summary refs log tree commit diff
path: root/Source/JavaScriptCore/runtime/VM.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'Source/JavaScriptCore/runtime/VM.cpp')
-rw-r--r--  Source/JavaScriptCore/runtime/VM.cpp  703
1 files changed, 509 insertions, 194 deletions
diff --git a/Source/JavaScriptCore/runtime/VM.cpp b/Source/JavaScriptCore/runtime/VM.cpp
index 3ca134ba2..de96ca07f 100644
--- a/Source/JavaScriptCore/runtime/VM.cpp
+++ b/Source/JavaScriptCore/runtime/VM.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2008, 2011, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008, 2011, 2013-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -10,7 +10,7 @@
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
- * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
@@ -30,41 +30,79 @@
#include "VM.h"
#include "ArgList.h"
+#include "ArrayBufferNeuteringWatchpoint.h"
+#include "BuiltinExecutables.h"
+#include "BytecodeIntrinsicRegistry.h"
+#include "CodeBlock.h"
#include "CodeCache.h"
#include "CommonIdentifiers.h"
+#include "CommonSlowPaths.h"
+#include "CustomGetterSetter.h"
#include "DFGLongLivedState.h"
-#include "DebuggerActivation.h"
+#include "DFGWorklist.h"
+#include "Disassembler.h"
+#include "ErrorInstance.h"
+#include "Exception.h"
+#include "FTLThunks.h"
#include "FunctionConstructor.h"
#include "GCActivityCallback.h"
+#include "GeneratorFrame.h"
#include "GetterSetter.h"
#include "Heap.h"
+#include "HeapIterationScope.h"
#include "HostCallReturnValue.h"
+#include "Identifier.h"
#include "IncrementalSweeper.h"
+#include "InferredTypeTable.h"
#include "Interpreter.h"
-#include "JSActivation.h"
+#include "JITCode.h"
#include "JSAPIValueWrapper.h"
#include "JSArray.h"
+#include "JSCInlines.h"
#include "JSFunction.h"
+#include "JSGlobalObjectFunctions.h"
+#include "JSInternalPromiseDeferred.h"
+#include "JSLexicalEnvironment.h"
#include "JSLock.h"
-#include "JSNameScope.h"
#include "JSNotAnObject.h"
-#include "JSPropertyNameIterator.h"
+#include "JSPromiseDeferred.h"
+#include "JSPropertyNameEnumerator.h"
+#include "JSTemplateRegistryKey.h"
#include "JSWithScope.h"
#include "Lexer.h"
#include "Lookup.h"
+#include "MapData.h"
+#include "NativeStdFunctionCell.h"
#include "Nodes.h"
-#include "ParserArena.h"
+#include "Parser.h"
+#include "ProfilerDatabase.h"
+#include "PropertyMapHashTable.h"
#include "RegExpCache.h"
#include "RegExpObject.h"
+#include "RegisterAtOffsetList.h"
+#include "RuntimeType.h"
+#include "SamplingProfiler.h"
+#include "SimpleTypedArrayController.h"
#include "SourceProviderCache.h"
+#include "StackVisitor.h"
#include "StrictEvalActivation.h"
#include "StrongInlines.h"
+#include "StructureInlines.h"
+#include "TypeProfiler.h"
+#include "TypeProfilerLog.h"
#include "UnlinkedCodeBlock.h"
+#include "VMEntryScope.h"
+#include "Watchdog.h"
+#include "WeakGCMapInlines.h"
+#include "WeakMapData.h"
+#include <wtf/CurrentTime.h>
#include <wtf/ProcessID.h>
#include <wtf/RetainPtr.h>
#include <wtf/StringPrintStream.h>
#include <wtf/Threading.h>
#include <wtf/WTFThreadData.h>
+#include <wtf/text/AtomicStringTable.h>
+#include <wtf/text/SymbolRegistry.h>
#if ENABLE(DFG_JIT)
#include "ConservativeRoots.h"
@@ -82,24 +120,6 @@ using namespace WTF;
namespace JSC {
-extern const HashTable arrayConstructorTable;
-extern const HashTable arrayPrototypeTable;
-extern const HashTable booleanPrototypeTable;
-extern const HashTable jsonTable;
-extern const HashTable dateTable;
-extern const HashTable dateConstructorTable;
-extern const HashTable errorPrototypeTable;
-extern const HashTable globalObjectTable;
-extern const HashTable mathTable;
-extern const HashTable numberConstructorTable;
-extern const HashTable numberPrototypeTable;
-JS_EXPORTDATA extern const HashTable objectConstructorTable;
-extern const HashTable privateNamePrototypeTable;
-extern const HashTable regExpTable;
-extern const HashTable regExpConstructorTable;
-extern const HashTable regExpPrototypeTable;
-extern const HashTable stringConstructorTable;
-
// Note: Platform.h will enforce that ENABLE(ASSEMBLER) is true if either
// ENABLE(JIT) or ENABLE(YARR_JIT) or both are enabled. The code below
// just checks for ENABLE(JIT) or ENABLE(YARR_JIT) with this premise in mind.
@@ -107,22 +127,14 @@ extern const HashTable stringConstructorTable;
#if ENABLE(ASSEMBLER)
static bool enableAssembler(ExecutableAllocator& executableAllocator)
{
- if (!executableAllocator.isValid() || (!Options::useJIT() && !Options::useRegExpJIT()))
+ if (!Options::useJIT() && !Options::useRegExpJIT())
return false;
-#if USE(CF)
-#if COMPILER(GCC) && !COMPILER(CLANG)
- // FIXME: remove this once the EWS have been upgraded to LLVM.
- // Work around a bug of GCC with strict-aliasing.
- RetainPtr<CFStringRef> canUseJITKeyRetain = adoptCF(CFStringCreateWithCString(0 , "JavaScriptCoreUseJIT", kCFStringEncodingMacRoman));
- CFStringRef canUseJITKey = canUseJITKeyRetain.get();
-#else
- CFStringRef canUseJITKey = CFSTR("JavaScriptCoreUseJIT");
-#endif // COMPILER(GCC) && !COMPILER(CLANG)
- RetainPtr<CFTypeRef> canUseJIT = adoptCF(CFPreferencesCopyAppValue(canUseJITKey, kCFPreferencesCurrentApplication));
- if (canUseJIT)
- return kCFBooleanTrue == canUseJIT.get();
-#endif
+ if (!executableAllocator.isValid()) {
+ if (Options::crashIfCantAllocateJITMemory())
+ CRASH();
+ return false;
+ }
#if USE(CF) || OS(UNIX)
char* canUseJITString = getenv("JavaScriptCoreUseJIT");
@@ -141,45 +153,23 @@ VM::VM(VMType vmType, HeapType heapType)
, heap(this, heapType)
, vmType(vmType)
, clientData(0)
+ , topVMEntryFrame(nullptr)
, topCallFrame(CallFrame::noCaller())
- , arrayConstructorTable(fastNew<HashTable>(JSC::arrayConstructorTable))
- , arrayPrototypeTable(fastNew<HashTable>(JSC::arrayPrototypeTable))
- , booleanPrototypeTable(fastNew<HashTable>(JSC::booleanPrototypeTable))
- , dateTable(fastNew<HashTable>(JSC::dateTable))
- , dateConstructorTable(fastNew<HashTable>(JSC::dateConstructorTable))
- , errorPrototypeTable(fastNew<HashTable>(JSC::errorPrototypeTable))
- , globalObjectTable(fastNew<HashTable>(JSC::globalObjectTable))
- , jsonTable(fastNew<HashTable>(JSC::jsonTable))
- , mathTable(fastNew<HashTable>(JSC::mathTable))
- , numberConstructorTable(fastNew<HashTable>(JSC::numberConstructorTable))
- , numberPrototypeTable(fastNew<HashTable>(JSC::numberPrototypeTable))
- , objectConstructorTable(fastNew<HashTable>(JSC::objectConstructorTable))
- , privateNamePrototypeTable(fastNew<HashTable>(JSC::privateNamePrototypeTable))
- , regExpTable(fastNew<HashTable>(JSC::regExpTable))
- , regExpConstructorTable(fastNew<HashTable>(JSC::regExpConstructorTable))
- , regExpPrototypeTable(fastNew<HashTable>(JSC::regExpPrototypeTable))
- , stringConstructorTable(fastNew<HashTable>(JSC::stringConstructorTable))
- , identifierTable(vmType == Default ? wtfThreadData().currentIdentifierTable() : createIdentifierTable())
- , propertyNames(new CommonIdentifiers(this))
+ , m_atomicStringTable(vmType == Default ? wtfThreadData().atomicStringTable() : new AtomicStringTable)
+ , propertyNames(nullptr)
, emptyList(new MarkedArgumentBuffer)
- , parserArena(adoptPtr(new ParserArena))
- , keywords(adoptPtr(new Keywords(this)))
+ , customGetterSetterFunctionMap(*this)
+ , stringCache(*this)
+ , prototypeMap(*this)
, interpreter(0)
- , jsArrayClassInfo(&JSArray::s_info)
- , jsFinalObjectClassInfo(&JSFinalObject::s_info)
-#if ENABLE(DFG_JIT)
+ , jsArrayClassInfo(JSArray::info())
+ , jsFinalObjectClassInfo(JSFinalObject::info())
, sizeOfLastScratchBuffer(0)
-#endif
- , dynamicGlobalObject(0)
- , m_enabledProfiler(0)
+ , entryScope(0)
, m_regExpCache(new RegExpCache(this))
#if ENABLE(REGEXP_TRACING)
, m_rtTraceList(new RTTraceList())
#endif
-#ifndef NDEBUG
- , exclusiveThread(0)
-#endif
- , m_newStringsSinceLastHashCons(0)
#if ENABLE(ASSEMBLER)
, m_canUseAssembler(enableAssembler(executableAllocator))
#endif
@@ -192,22 +182,43 @@ VM::VM(VMType vmType, HeapType heapType)
#if ENABLE(GC_VALIDATION)
, m_initializingObjectClass(0)
#endif
+ , m_stackPointerAtVMEntry(0)
+ , m_stackLimit(0)
+#if !ENABLE(JIT)
+ , m_jsStackLimit(0)
+#endif
+#if ENABLE(FTL_JIT)
+ , m_ftlStackLimit(0)
+ , m_largestFTLStackSize(0)
+#endif
, m_inDefineOwnProperty(false)
- , m_codeCache(CodeCache::create(CodeCache::GlobalCodeCache))
+ , m_codeCache(std::make_unique<CodeCache>())
+ , m_enabledProfiler(nullptr)
+ , m_builtinExecutables(std::make_unique<BuiltinExecutables>(*this))
+ , m_typeProfilerEnabledCount(0)
+ , m_controlFlowProfilerEnabledCount(0)
{
interpreter = new Interpreter(*this);
+ StackBounds stack = wtfThreadData().stack();
+ updateReservedZoneSize(Options::reservedZoneSize());
+#if !ENABLE(JIT)
+ interpreter->stack().setReservedZoneSize(Options::reservedZoneSize());
+#endif
+ setLastStackTop(stack.origin());
// Need to be careful to keep everything consistent here
JSLockHolder lock(this);
- IdentifierTable* existingEntryIdentifierTable = wtfThreadData().setCurrentIdentifierTable(identifierTable);
+ AtomicStringTable* existingEntryAtomicStringTable = wtfThreadData().setCurrentAtomicStringTable(m_atomicStringTable);
+ propertyNames = new CommonIdentifiers(this);
structureStructure.set(*this, Structure::createStructure(*this));
structureRareDataStructure.set(*this, StructureRareData::createStructure(*this, 0, jsNull()));
- debuggerActivationStructure.set(*this, DebuggerActivation::createStructure(*this, 0, jsNull()));
terminatedExecutionErrorStructure.set(*this, TerminatedExecutionError::createStructure(*this, 0, jsNull()));
stringStructure.set(*this, JSString::createStructure(*this, 0, jsNull()));
notAnObjectStructure.set(*this, JSNotAnObject::createStructure(*this, 0, jsNull()));
- propertyNameIteratorStructure.set(*this, JSPropertyNameIterator::createStructure(*this, 0, jsNull()));
+ propertyNameEnumeratorStructure.set(*this, JSPropertyNameEnumerator::createStructure(*this, 0, jsNull()));
getterSetterStructure.set(*this, GetterSetter::createStructure(*this, 0, jsNull()));
+ customGetterSetterStructure.set(*this, CustomGetterSetter::createStructure(*this, 0, jsNull()));
+ scopedArgumentsTableStructure.set(*this, ScopedArgumentsTable::createStructure(*this, 0, jsNull()));
apiWrapperStructure.set(*this, JSAPIValueWrapper::createStructure(*this, 0, jsNull()));
JSScopeStructure.set(*this, JSScope::createStructure(*this, 0, jsNull()));
executableStructure.set(*this, ExecutableBase::createStructure(*this, 0, jsNull()));
@@ -215,59 +226,138 @@ VM::VM(VMType vmType, HeapType heapType)
evalExecutableStructure.set(*this, EvalExecutable::createStructure(*this, 0, jsNull()));
programExecutableStructure.set(*this, ProgramExecutable::createStructure(*this, 0, jsNull()));
functionExecutableStructure.set(*this, FunctionExecutable::createStructure(*this, 0, jsNull()));
+#if ENABLE(WEBASSEMBLY)
+ webAssemblyExecutableStructure.set(*this, WebAssemblyExecutable::createStructure(*this, 0, jsNull()));
+#endif
+ moduleProgramExecutableStructure.set(*this, ModuleProgramExecutable::createStructure(*this, 0, jsNull()));
regExpStructure.set(*this, RegExp::createStructure(*this, 0, jsNull()));
- sharedSymbolTableStructure.set(*this, SharedSymbolTable::createStructure(*this, 0, jsNull()));
+ symbolStructure.set(*this, Symbol::createStructure(*this, 0, jsNull()));
+ symbolTableStructure.set(*this, SymbolTable::createStructure(*this, 0, jsNull()));
structureChainStructure.set(*this, StructureChain::createStructure(*this, 0, jsNull()));
sparseArrayValueMapStructure.set(*this, SparseArrayValueMap::createStructure(*this, 0, jsNull()));
- withScopeStructure.set(*this, JSWithScope::createStructure(*this, 0, jsNull()));
+ templateRegistryKeyStructure.set(*this, JSTemplateRegistryKey::createStructure(*this, 0, jsNull()));
+ arrayBufferNeuteringWatchpointStructure.set(*this, ArrayBufferNeuteringWatchpoint::createStructure(*this));
unlinkedFunctionExecutableStructure.set(*this, UnlinkedFunctionExecutable::createStructure(*this, 0, jsNull()));
unlinkedProgramCodeBlockStructure.set(*this, UnlinkedProgramCodeBlock::createStructure(*this, 0, jsNull()));
unlinkedEvalCodeBlockStructure.set(*this, UnlinkedEvalCodeBlock::createStructure(*this, 0, jsNull()));
unlinkedFunctionCodeBlockStructure.set(*this, UnlinkedFunctionCodeBlock::createStructure(*this, 0, jsNull()));
+ unlinkedModuleProgramCodeBlockStructure.set(*this, UnlinkedModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
propertyTableStructure.set(*this, PropertyTable::createStructure(*this, 0, jsNull()));
+ weakMapDataStructure.set(*this, WeakMapData::createStructure(*this, 0, jsNull()));
+ inferredValueStructure.set(*this, InferredValue::createStructure(*this, 0, jsNull()));
+ inferredTypeStructure.set(*this, InferredType::createStructure(*this, 0, jsNull()));
+ inferredTypeTableStructure.set(*this, InferredTypeTable::createStructure(*this, 0, jsNull()));
+ functionRareDataStructure.set(*this, FunctionRareData::createStructure(*this, 0, jsNull()));
+ generatorFrameStructure.set(*this, GeneratorFrame::createStructure(*this, 0, jsNull()));
+ exceptionStructure.set(*this, Exception::createStructure(*this, 0, jsNull()));
+ promiseDeferredStructure.set(*this, JSPromiseDeferred::createStructure(*this, 0, jsNull()));
+ internalPromiseDeferredStructure.set(*this, JSInternalPromiseDeferred::createStructure(*this, 0, jsNull()));
+ programCodeBlockStructure.set(*this, ProgramCodeBlock::createStructure(*this, 0, jsNull()));
+ moduleProgramCodeBlockStructure.set(*this, ModuleProgramCodeBlock::createStructure(*this, 0, jsNull()));
+ evalCodeBlockStructure.set(*this, EvalCodeBlock::createStructure(*this, 0, jsNull()));
+ functionCodeBlockStructure.set(*this, FunctionCodeBlock::createStructure(*this, 0, jsNull()));
+#if ENABLE(WEBASSEMBLY)
+ webAssemblyCodeBlockStructure.set(*this, WebAssemblyCodeBlock::createStructure(*this, 0, jsNull()));
+#endif
+
+ iterationTerminator.set(*this, JSFinalObject::create(*this, JSFinalObject::createStructure(*this, 0, jsNull(), 1)));
+ nativeStdFunctionCellStructure.set(*this, NativeStdFunctionCell::createStructure(*this, 0, jsNull()));
smallStrings.initializeCommonStrings(*this);
- wtfThreadData().setCurrentIdentifierTable(existingEntryIdentifierTable);
+ wtfThreadData().setCurrentAtomicStringTable(existingEntryAtomicStringTable);
#if ENABLE(JIT)
- jitStubs = adoptPtr(new JITThunks());
- performPlatformSpecificJITAssertions(this);
+ jitStubs = std::make_unique<JITThunks>();
+ allCalleeSaveRegisterOffsets = std::make_unique<RegisterAtOffsetList>(RegisterSet::vmCalleeSaveRegisters(), RegisterAtOffsetList::ZeroBased);
#endif
+ arityCheckData = std::make_unique<CommonSlowPaths::ArityCheckData>();
+
+#if ENABLE(FTL_JIT)
+ ftlThunks = std::make_unique<FTL::Thunks>();
+#endif // ENABLE(FTL_JIT)
- interpreter->initialize(this->canUseJIT());
+ interpreter->initialize();
#if ENABLE(JIT)
initializeHostCallReturnValue(); // This is needed to convince the linker not to drop host call return support.
#endif
heap.notifyIsSafeToCollect();
-
+
LLInt::Data::performAssertions(*this);
- if (Options::enableProfiler()) {
- m_perBytecodeProfiler = adoptPtr(new Profiler::Database(*this));
+ if (Options::useProfiler()) {
+ m_perBytecodeProfiler = std::make_unique<Profiler::Database>(*this);
StringPrintStream pathOut;
-#if !OS(WINCE)
const char* profilerPath = getenv("JSC_PROFILER_PATH");
if (profilerPath)
pathOut.print(profilerPath, "/");
-#endif
pathOut.print("JSCProfile-", getCurrentProcessID(), "-", m_perBytecodeProfiler->databaseID(), ".json");
m_perBytecodeProfiler->registerToSaveAtExit(pathOut.toCString().data());
}
+ callFrameForCatch = nullptr;
+
#if ENABLE(DFG_JIT)
if (canUseJIT())
- m_dfgState = adoptPtr(new DFG::LongLivedState());
+ dfgState = std::make_unique<DFG::LongLivedState>();
#endif
+
+ // Initialize this last, as a free way of asserting that VM initialization itself
+ // won't use this.
+ m_typedArrayController = adoptRef(new SimpleTypedArrayController());
+
+ m_bytecodeIntrinsicRegistry = std::make_unique<BytecodeIntrinsicRegistry>(*this);
+
+ if (Options::useTypeProfiler())
+ enableTypeProfiler();
+ if (Options::useControlFlowProfiler())
+ enableControlFlowProfiler();
+#if ENABLE(SAMPLING_PROFILER)
+ if (Options::useSamplingProfiler()) {
+ setShouldBuildPCToCodeOriginMapping();
+ m_samplingProfiler = adoptRef(new SamplingProfiler(*this, Stopwatch::create()));
+ m_samplingProfiler->start();
+ }
+#endif // ENABLE(SAMPLING_PROFILER)
+
+ if (Options::alwaysGeneratePCToCodeOriginMap())
+ setShouldBuildPCToCodeOriginMapping();
+
+ if (Options::watchdog()) {
+ std::chrono::milliseconds timeoutMillis(Options::watchdog());
+ Watchdog& watchdog = ensureWatchdog();
+ watchdog.setTimeLimit(timeoutMillis);
+ }
}
VM::~VM()
{
- // Clear this first to ensure that nobody tries to remove themselves from it.
- m_perBytecodeProfiler.clear();
+ // Never GC, ever again.
+ heap.incrementDeferralDepth();
+
+#if ENABLE(SAMPLING_PROFILER)
+ if (m_samplingProfiler)
+ m_samplingProfiler->shutdown();
+#endif // ENABLE(SAMPLING_PROFILER)
+
+#if ENABLE(DFG_JIT)
+ // Make sure concurrent compilations are done, but don't install them, since there is
+ // no point to doing so.
+ for (unsigned i = DFG::numberOfWorklists(); i--;) {
+ if (DFG::Worklist* worklist = DFG::worklistForIndexOrNull(i)) {
+ worklist->waitUntilAllPlansForVMAreReady(*this);
+ worklist->removeAllReadyPlansForVM(*this);
+ }
+ }
+#endif // ENABLE(DFG_JIT)
+
+ waitForAsynchronousDisassembly();
+ // Clear this first to ensure that nobody tries to remove themselves from it.
+ m_perBytecodeProfiler = nullptr;
+
ASSERT(m_apiLock->currentThreadIsHoldingLock());
m_apiLock->willDestroyVM(this);
heap.lastChanceToFinalize();
@@ -277,47 +367,11 @@ VM::~VM()
interpreter = reinterpret_cast<Interpreter*>(0xbbadbeef);
#endif
- arrayPrototypeTable->deleteTable();
- arrayConstructorTable->deleteTable();
- booleanPrototypeTable->deleteTable();
- dateTable->deleteTable();
- dateConstructorTable->deleteTable();
- errorPrototypeTable->deleteTable();
- globalObjectTable->deleteTable();
- jsonTable->deleteTable();
- mathTable->deleteTable();
- numberConstructorTable->deleteTable();
- numberPrototypeTable->deleteTable();
- objectConstructorTable->deleteTable();
- privateNamePrototypeTable->deleteTable();
- regExpTable->deleteTable();
- regExpConstructorTable->deleteTable();
- regExpPrototypeTable->deleteTable();
- stringConstructorTable->deleteTable();
-
- fastDelete(const_cast<HashTable*>(arrayConstructorTable));
- fastDelete(const_cast<HashTable*>(arrayPrototypeTable));
- fastDelete(const_cast<HashTable*>(booleanPrototypeTable));
- fastDelete(const_cast<HashTable*>(dateTable));
- fastDelete(const_cast<HashTable*>(dateConstructorTable));
- fastDelete(const_cast<HashTable*>(errorPrototypeTable));
- fastDelete(const_cast<HashTable*>(globalObjectTable));
- fastDelete(const_cast<HashTable*>(jsonTable));
- fastDelete(const_cast<HashTable*>(mathTable));
- fastDelete(const_cast<HashTable*>(numberConstructorTable));
- fastDelete(const_cast<HashTable*>(numberPrototypeTable));
- fastDelete(const_cast<HashTable*>(objectConstructorTable));
- fastDelete(const_cast<HashTable*>(privateNamePrototypeTable));
- fastDelete(const_cast<HashTable*>(regExpTable));
- fastDelete(const_cast<HashTable*>(regExpConstructorTable));
- fastDelete(const_cast<HashTable*>(regExpPrototypeTable));
- fastDelete(const_cast<HashTable*>(stringConstructorTable));
-
delete emptyList;
delete propertyNames;
if (vmType != Default)
- deleteIdentifierTable(identifierTable);
+ delete m_atomicStringTable;
delete clientData;
delete m_regExpCache;
@@ -331,17 +385,22 @@ VM::~VM()
#endif
}
-PassRefPtr<VM> VM::createContextGroup(HeapType heapType)
+void VM::setLastStackTop(void* lastStackTop)
+{
+ m_lastStackTop = lastStackTop;
+}
+
+Ref<VM> VM::createContextGroup(HeapType heapType)
{
- return adoptRef(new VM(APIContextGroup, heapType));
+ return adoptRef(*new VM(APIContextGroup, heapType));
}
-PassRefPtr<VM> VM::create(HeapType heapType)
+Ref<VM> VM::create(HeapType heapType)
{
- return adoptRef(new VM(Default, heapType));
+ return adoptRef(*new VM(Default, heapType));
}
-PassRefPtr<VM> VM::createLeaked(HeapType heapType)
+Ref<VM> VM::createLeaked(HeapType heapType)
{
return create(heapType);
}
@@ -355,10 +414,8 @@ VM& VM::sharedInstance()
{
GlobalJSLock globalLock;
VM*& instance = sharedInstanceInternal();
- if (!instance) {
+ if (!instance)
instance = adoptRef(new VM(APIShared, SmallHeap)).leakRef();
- instance->makeUsableFromMultipleThreads();
- }
return *instance;
}
@@ -368,6 +425,32 @@ VM*& VM::sharedInstanceInternal()
return sharedInstance;
}
+Watchdog& VM::ensureWatchdog()
+{
+ if (!m_watchdog) {
+ m_watchdog = adoptRef(new Watchdog());
+
+ // The LLINT peeks into the Watchdog object directly. In order to do that,
+ // the LLINT assumes that the internal shape of a std::unique_ptr is the
+ // same as a plain C++ pointer, and loads the address of Watchdog from it.
+ RELEASE_ASSERT(*reinterpret_cast<Watchdog**>(&m_watchdog) == m_watchdog.get());
+
+ // And if we've previously compiled any functions, we need to revert
+ // them because they don't have the needed polling checks for the watchdog
+ // yet.
+ deleteAllCode();
+ }
+ return *m_watchdog;
+}
+
+#if ENABLE(SAMPLING_PROFILER)
+void VM::ensureSamplingProfiler(RefPtr<Stopwatch>&& stopwatch)
+{
+ if (!m_samplingProfiler)
+ m_samplingProfiler = adoptRef(new SamplingProfiler(*this, WTFMove(stopwatch)));
+}
+#endif // ENABLE(SAMPLING_PROFILER)
+
#if ENABLE(JIT)
static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
{
@@ -376,6 +459,8 @@ static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
return charCodeAtThunkGenerator;
case CharAtIntrinsic:
return charAtThunkGenerator;
+ case Clz32Intrinsic:
+ return clz32ThunkGenerator;
case FromCharCodeIntrinsic:
return fromCharCodeThunkGenerator;
case SqrtIntrinsic:
@@ -396,26 +481,33 @@ static ThunkGenerator thunkGeneratorForIntrinsic(Intrinsic intrinsic)
return logThunkGenerator;
case IMulIntrinsic:
return imulThunkGenerator;
+ case RandomIntrinsic:
+ return randomThunkGenerator;
default:
return 0;
}
}
-NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
+NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
- return jitStubs->hostFunctionStub(this, function, constructor);
+ return jitStubs->hostFunctionStub(this, function, constructor, name);
}
-NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic)
+NativeExecutable* VM::getHostFunction(NativeFunction function, Intrinsic intrinsic, const String& name)
{
ASSERT(canUseJIT());
- return jitStubs->hostFunctionStub(this, function, intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0, intrinsic);
+ return jitStubs->hostFunctionStub(this, function, intrinsic != NoIntrinsic ? thunkGeneratorForIntrinsic(intrinsic) : 0, intrinsic, name);
}
#else // !ENABLE(JIT)
-NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor)
+
+NativeExecutable* VM::getHostFunction(NativeFunction function, NativeFunction constructor, const String& name)
{
- return NativeExecutable::create(*this, function, constructor);
+ return NativeExecutable::create(*this,
+ adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_call_trampoline), JITCode::HostCallThunk)), function,
+ adoptRef(new NativeJITCode(MacroAssemblerCodeRef::createLLIntCodeRef(llint_native_construct_trampoline), JITCode::HostCallThunk)), constructor,
+ NoIntrinsic, name);
}
+
#endif // !ENABLE(JIT)
VM::ClientData::~ClientData()
@@ -426,7 +518,7 @@ void VM::resetDateCache()
{
localTimeOffsetCache.reset();
cachedDateString = String();
- cachedDateStringValue = QNaN;
+ cachedDateStringValue = std::numeric_limits<double>::quiet_NaN();
dateInstanceCache.reset();
}
@@ -440,11 +532,33 @@ void VM::stopSampling()
interpreter->stopSampling();
}
-void VM::discardAllCode()
+void VM::whenIdle(std::function<void()> callback)
+{
+ if (!entryScope) {
+ callback();
+ return;
+ }
+
+ entryScope->addDidPopListener(callback);
+}
+
+void VM::deleteAllLinkedCode()
+{
+ whenIdle([this]() {
+ heap.deleteAllCodeBlocks();
+ heap.reportAbandonedObjectGraph();
+ });
+}
+
+void VM::deleteAllCode()
{
- m_codeCache->clear();
- heap.deleteAllCompiledCode();
- heap.reportAbandonedObjectGraph();
+ whenIdle([this]() {
+ m_codeCache->clear();
+ m_regExpCache->deleteAllCode();
+ heap.deleteAllCodeBlocks();
+ heap.deleteAllUnlinkedCodeBlocks();
+ heap.reportAbandonedObjectGraph();
+ });
}
void VM::dumpSampleData(ExecState* exec)
@@ -457,7 +571,7 @@ void VM::dumpSampleData(ExecState* exec)
SourceProviderCache* VM::addSourceProviderCache(SourceProvider* sourceProvider)
{
- SourceProviderCacheMap::AddResult addResult = sourceProviderCacheMap.add(sourceProvider, 0);
+ auto addResult = sourceProviderCacheMap.add(sourceProvider, nullptr);
if (addResult.isNewEntry)
addResult.iterator->value = adoptRef(new SourceProviderCache);
return addResult.iterator->value.get();
@@ -468,60 +582,115 @@ void VM::clearSourceProviderCaches()
sourceProviderCacheMap.clear();
}
-struct StackPreservingRecompiler : public MarkedBlock::VoidFunctor {
- HashSet<FunctionExecutable*> currentlyExecutingFunctions;
- void operator()(JSCell* cell)
- {
- if (!cell->inherits(&FunctionExecutable::s_info))
- return;
- FunctionExecutable* executable = jsCast<FunctionExecutable*>(cell);
- if (currentlyExecutingFunctions.contains(executable))
- return;
- executable->clearCodeIfNotCompiling();
+void VM::throwException(ExecState* exec, Exception* exception)
+{
+ if (Options::breakOnThrow()) {
+ dataLog("In call frame ", RawPointer(exec), " for code block ", *exec->codeBlock(), "\n");
+ CRASH();
}
-};
-void VM::releaseExecutableMemory()
-{
- if (dynamicGlobalObject) {
- StackPreservingRecompiler recompiler;
- HashSet<JSCell*> roots;
- heap.canonicalizeCellLivenessData();
- heap.getConservativeRegisterRoots(roots);
- HashSet<JSCell*>::iterator end = roots.end();
- for (HashSet<JSCell*>::iterator ptr = roots.begin(); ptr != end; ++ptr) {
- ScriptExecutable* executable = 0;
- JSCell* cell = *ptr;
- if (cell->inherits(&ScriptExecutable::s_info))
- executable = static_cast<ScriptExecutable*>(*ptr);
- else if (cell->inherits(&JSFunction::s_info)) {
- JSFunction* function = jsCast<JSFunction*>(*ptr);
- if (function->isHostFunction())
- continue;
- executable = function->jsExecutable();
- } else
- continue;
- ASSERT(executable->inherits(&ScriptExecutable::s_info));
- executable->unlinkCalls();
- if (executable->inherits(&FunctionExecutable::s_info))
- recompiler.currentlyExecutingFunctions.add(static_cast<FunctionExecutable*>(executable));
-
- }
- heap.objectSpace().forEachLiveCell<StackPreservingRecompiler>(recompiler);
+ ASSERT(exec == topCallFrame || exec == exec->lexicalGlobalObject()->globalExec() || exec == exec->vmEntryGlobalObject()->globalExec());
+
+ interpreter->notifyDebuggerOfExceptionToBeThrown(exec, exception);
+
+ setException(exception);
+}
+
+JSValue VM::throwException(ExecState* exec, JSValue thrownValue)
+{
+ Exception* exception = jsDynamicCast<Exception*>(thrownValue);
+ if (!exception)
+ exception = Exception::create(*this, thrownValue);
+
+ throwException(exec, exception);
+ return JSValue(exception);
+}
+
+JSObject* VM::throwException(ExecState* exec, JSObject* error)
+{
+ return asObject(throwException(exec, JSValue(error)));
+}
+
+void VM::setStackPointerAtVMEntry(void* sp)
+{
+ m_stackPointerAtVMEntry = sp;
+ updateStackLimit();
+}
+
+size_t VM::updateReservedZoneSize(size_t reservedZoneSize)
+{
+ size_t oldReservedZoneSize = m_reservedZoneSize;
+ m_reservedZoneSize = reservedZoneSize;
+
+ updateStackLimit();
+
+ return oldReservedZoneSize;
+}
+
+#if PLATFORM(WIN)
+// On Windows the reserved stack space consists of committed memory, a guard page, and uncommitted memory,
+// where the guard page is a barrier between committed and uncommitted memory.
+// When data from the guard page is read or written, the guard page is moved, and memory is committed.
+// This is how the system grows the stack.
+// When using the C stack on Windows we need to precommit the needed stack space.
+// Otherwise we might crash later if we access uncommitted stack memory.
+// This can happen if we allocate stack space larger than the page guard size (4K).
+// The system does not get the chance to move the guard page, and commit more memory,
+// and we crash if uncommitted memory is accessed.
+// The MSVC compiler fixes this by inserting a call to the _chkstk() function,
+// when needed, see http://support.microsoft.com/kb/100775.
+// By touching every page up to the stack limit with a dummy operation,
+// we force the system to move the guard page, and commit memory.
+
+static void preCommitStackMemory(void* stackLimit)
+{
+ const int pageSize = 4096;
+ for (volatile char* p = reinterpret_cast<char*>(&stackLimit); p > stackLimit; p -= pageSize) {
+ char ch = *p;
+ *p = ch;
}
- m_regExpCache->invalidateCode();
- heap.collectAllGarbage();
}
+#endif
-void VM::clearExceptionStack()
+inline void VM::updateStackLimit()
{
- m_exceptionStack = RefCountedArray<StackFrame>();
+#if PLATFORM(WIN)
+ void* lastStackLimit = m_stackLimit;
+#endif
+
+ if (m_stackPointerAtVMEntry) {
+ ASSERT(wtfThreadData().stack().isGrowingDownward());
+ char* startOfStack = reinterpret_cast<char*>(m_stackPointerAtVMEntry);
+#if ENABLE(FTL_JIT)
+ m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + m_largestFTLStackSize);
+ m_ftlStackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize + 2 * m_largestFTLStackSize);
+#else
+ m_stackLimit = wtfThreadData().stack().recursionLimit(startOfStack, Options::maxPerThreadStackUsage(), m_reservedZoneSize);
+#endif
+ } else {
+#if ENABLE(FTL_JIT)
+ m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + m_largestFTLStackSize);
+ m_ftlStackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize + 2 * m_largestFTLStackSize);
+#else
+ m_stackLimit = wtfThreadData().stack().recursionLimit(m_reservedZoneSize);
+#endif
+ }
+
+#if PLATFORM(WIN)
+ if (lastStackLimit != m_stackLimit)
+ preCommitStackMemory(m_stackLimit);
+#endif
}
-
-void releaseExecutableMemory(VM& vm)
+
+#if ENABLE(FTL_JIT)
+void VM::updateFTLLargestStackSize(size_t stackSize)
{
- vm.releaseExecutableMemory();
+ if (stackSize > m_largestFTLStackSize) {
+ m_largestFTLStackSize = stackSize;
+ updateStackLimit();
+ }
}
+#endif
#if ENABLE(DFG_JIT)
void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
@@ -536,9 +705,22 @@ void VM::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
}
#endif
+void logSanitizeStack(VM* vm)
+{
+ if (Options::verboseSanitizeStack() && vm->topCallFrame) {
+ int dummy;
+ dataLog(
+ "Sanitizing stack with top call frame at ", RawPointer(vm->topCallFrame),
+ ", current stack pointer at ", RawPointer(&dummy), ", in ",
+ pointerDump(vm->topCallFrame->codeBlock()), " and last code origin = ",
+ vm->topCallFrame->codeOrigin(), "\n");
+ }
+}
+
#if ENABLE(REGEXP_TRACING)
void VM::addRegExpToTrace(RegExp* regExp)
{
+ gcProtect(regExp);
m_rtTraceList->add(regExp);
}
@@ -549,14 +731,16 @@ void VM::dumpRegExpTrace()
if (iter != m_rtTraceList->end()) {
dataLogF("\nRegExp Tracing\n");
- dataLogF(" match() matches\n");
- dataLogF("Regular Expression JIT Address calls found\n");
- dataLogF("----------------------------------------+----------------+----------+----------\n");
+ dataLogF("Regular Expression 8 Bit 16 Bit match() Matches Average\n");
+ dataLogF(" <Match only / Match> JIT Addr JIT Address calls found String len\n");
+ dataLogF("----------------------------------------+----------------+----------------+----------+----------+-----------\n");
unsigned reCount = 0;
- for (; iter != m_rtTraceList->end(); ++iter, ++reCount)
+ for (; iter != m_rtTraceList->end(); ++iter, ++reCount) {
(*iter)->printTraceData();
+ gcUnprotect(*iter);
+ }
dataLogF("%d Regular Expressions\n", reCount);
}
@@ -569,4 +753,135 @@ void VM::dumpRegExpTrace()
}
#endif
+void VM::registerWatchpointForImpureProperty(const Identifier& propertyName, Watchpoint* watchpoint)
+{
+ auto result = m_impurePropertyWatchpointSets.add(propertyName.string(), nullptr);
+ if (result.isNewEntry)
+ result.iterator->value = adoptRef(new WatchpointSet(IsWatched));
+ result.iterator->value->add(watchpoint);
+}
+
+void VM::addImpureProperty(const String& propertyName)
+{
+ if (RefPtr<WatchpointSet> watchpointSet = m_impurePropertyWatchpointSets.take(propertyName))
+ watchpointSet->fireAll("Impure property added");
+}
+
+class SetEnabledProfilerFunctor {
+public:
+ bool operator()(CodeBlock* codeBlock)
+ {
+ if (JITCode::isOptimizingJIT(codeBlock->jitType()))
+ codeBlock->jettison(Profiler::JettisonDueToLegacyProfiler);
+ return false;
+ }
+};
+
+void VM::setEnabledProfiler(LegacyProfiler* profiler)
+{
+ m_enabledProfiler = profiler;
+ if (m_enabledProfiler) {
+ SetEnabledProfilerFunctor functor;
+ heap.forEachCodeBlock(functor);
+ }
+}
+
+static bool enableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doEnableWork)
+{
+ bool needsToRecompile = false;
+ if (!counter) {
+ doEnableWork();
+ needsToRecompile = true;
+ }
+ counter++;
+
+ return needsToRecompile;
+}
+
+static bool disableProfilerWithRespectToCount(unsigned& counter, std::function<void()> doDisableWork)
+{
+ RELEASE_ASSERT(counter > 0);
+ bool needsToRecompile = false;
+ counter--;
+ if (!counter) {
+ doDisableWork();
+ needsToRecompile = true;
+ }
+
+ return needsToRecompile;
+}
+
+bool VM::enableTypeProfiler()
+{
+ auto enableTypeProfiler = [this] () {
+ this->m_typeProfiler = std::make_unique<TypeProfiler>();
+ this->m_typeProfilerLog = std::make_unique<TypeProfilerLog>();
+ };
+
+ return enableProfilerWithRespectToCount(m_typeProfilerEnabledCount, enableTypeProfiler);
+}
+
+bool VM::disableTypeProfiler()
+{
+ auto disableTypeProfiler = [this] () {
+ this->m_typeProfiler.reset(nullptr);
+ this->m_typeProfilerLog.reset(nullptr);
+ };
+
+ return disableProfilerWithRespectToCount(m_typeProfilerEnabledCount, disableTypeProfiler);
+}
+
+bool VM::enableControlFlowProfiler()
+{
+ auto enableControlFlowProfiler = [this] () {
+ this->m_controlFlowProfiler = std::make_unique<ControlFlowProfiler>();
+ };
+
+ return enableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, enableControlFlowProfiler);
+}
+
+bool VM::disableControlFlowProfiler()
+{
+ auto disableControlFlowProfiler = [this] () {
+ this->m_controlFlowProfiler.reset(nullptr);
+ };
+
+ return disableProfilerWithRespectToCount(m_controlFlowProfilerEnabledCount, disableControlFlowProfiler);
+}
+
+void VM::dumpTypeProfilerData()
+{
+ if (!typeProfiler())
+ return;
+
+ typeProfilerLog()->processLogEntries(ASCIILiteral("VM Dump Types"));
+ typeProfiler()->dumpTypeProfilerData(*this);
+}
+
+void VM::queueMicrotask(JSGlobalObject* globalObject, PassRefPtr<Microtask> task)
+{
+ m_microtaskQueue.append(std::make_unique<QueuedTask>(*this, globalObject, task));
+}
+
+void VM::drainMicrotasks()
+{
+ while (!m_microtaskQueue.isEmpty())
+ m_microtaskQueue.takeFirst()->run();
+}
+
+void QueuedTask::run()
+{
+ m_microtask->run(m_globalObject->globalExec());
+}
+
+void sanitizeStackForVM(VM* vm)
+{
+ logSanitizeStack(vm);
+#if !ENABLE(JIT)
+ vm->interpreter->stack().sanitizeStack();
+#else
+ sanitizeStackForVMImpl(vm);
+#endif
+}
+
} // namespace JSC