author	Simon Hausmann <simon.hausmann@nokia.com>	2012-07-11 13:45:28 +0200
committer	Simon Hausmann <simon.hausmann@nokia.com>	2012-07-11 13:45:28 +0200
commit	d6a599dbc9d824a462b2b206316e102bf8136446 (patch)
tree	ecb257a5e55b2239d74b90fdad62fccd661cf286 /Source/JavaScriptCore/heap
parent	3ccc3a85f09a83557b391aae380d3bf5f81a2911 (diff)
download	qtwebkit-d6a599dbc9d824a462b2b206316e102bf8136446.tar.gz
Imported WebKit commit 8ff1f22783a32de82fee915abd55bd1b298f2644 (http://svn.webkit.org/repository/webkit/trunk@122325)
New snapshot that should work with the latest Qt build system changes
Diffstat (limited to 'Source/JavaScriptCore/heap')
-rw-r--r--	Source/JavaScriptCore/heap/ConservativeRoots.cpp	|  46
-rw-r--r--	Source/JavaScriptCore/heap/ConservativeRoots.h	|   6
-rw-r--r--	Source/JavaScriptCore/heap/CopiedSpace.cpp	|   1
-rw-r--r--	Source/JavaScriptCore/heap/Heap.cpp	|  41
-rw-r--r--	Source/JavaScriptCore/heap/Heap.h	|  16
-rw-r--r--	Source/JavaScriptCore/heap/HeapTimer.cpp	|  42
-rw-r--r--	Source/JavaScriptCore/heap/HeapTimer.h	|   6
-rw-r--r--	Source/JavaScriptCore/heap/IncrementalSweeper.cpp	|   9
-rw-r--r--	Source/JavaScriptCore/heap/IncrementalSweeper.h	|   2
-rw-r--r--	Source/JavaScriptCore/heap/JITStubRoutineSet.cpp	| 126
-rw-r--r--	Source/JavaScriptCore/heap/JITStubRoutineSet.h	|  80
-rw-r--r--	Source/JavaScriptCore/heap/MachineStackMarker.cpp	|  14
-rw-r--r--	Source/JavaScriptCore/heap/MachineStackMarker.h	|   6
-rw-r--r--	Source/JavaScriptCore/heap/MarkStack.cpp	| 124
-rw-r--r--	Source/JavaScriptCore/heap/MarkStack.h	|  13
-rw-r--r--	Source/JavaScriptCore/heap/MarkedAllocator.cpp	|   2
-rw-r--r--	Source/JavaScriptCore/heap/SlotVisitor.h	|   2
-rw-r--r--	Source/JavaScriptCore/heap/WeakBlock.cpp	|   4
18 files changed, 462 insertions, 78 deletions
diff --git a/Source/JavaScriptCore/heap/ConservativeRoots.cpp b/Source/JavaScriptCore/heap/ConservativeRoots.cpp
index d63faebf3..6b9cbef45 100644
--- a/Source/JavaScriptCore/heap/ConservativeRoots.cpp
+++ b/Source/JavaScriptCore/heap/ConservativeRoots.cpp
@@ -62,11 +62,6 @@ void ConservativeRoots::grow()
m_roots = newRoots;
}
-class DummyMarkHook {
-public:
- void mark(void*) { }
-};
-
template<typename MarkHook>
inline void ConservativeRoots::genericAddPointer(void* p, TinyBloomFilter filter, MarkHook& markHook)
{
@@ -110,15 +105,48 @@ void ConservativeRoots::genericAddSpan(void* begin, void* end, MarkHook& markHoo
genericAddPointer(*it, filter, markHook);
}
+class DummyMarkHook {
+public:
+ void mark(void*) { }
+};
+
void ConservativeRoots::add(void* begin, void* end)
{
- DummyMarkHook hook;
- genericAddSpan(begin, end, hook);
+ DummyMarkHook dummy;
+ genericAddSpan(begin, end, dummy);
}
-void ConservativeRoots::add(void* begin, void* end, DFGCodeBlocks& dfgCodeBlocks)
+void ConservativeRoots::add(void* begin, void* end, JITStubRoutineSet& jitStubRoutines)
+{
+ genericAddSpan(begin, end, jitStubRoutines);
+}
+
+template<typename T, typename U>
+class CompositeMarkHook {
+public:
+ CompositeMarkHook(T& first, U& second)
+ : m_first(first)
+ , m_second(second)
+ {
+ }
+
+ void mark(void* address)
+ {
+ m_first.mark(address);
+ m_second.mark(address);
+ }
+
+private:
+ T& m_first;
+ U& m_second;
+};
+
+void ConservativeRoots::add(
+ void* begin, void* end, JITStubRoutineSet& jitStubRoutines, DFGCodeBlocks& dfgCodeBlocks)
{
- genericAddSpan(begin, end, dfgCodeBlocks);
+ CompositeMarkHook<JITStubRoutineSet, DFGCodeBlocks> markHook(
+ jitStubRoutines, dfgCodeBlocks);
+ genericAddSpan(begin, end, markHook);
}
} // namespace JSC
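
Note on the ConservativeRoots change above: DummyMarkHook moves below genericAddSpan, a JITStubRoutineSet-only overload of add() is introduced, and the register-file overload now fans each conservatively found pointer out to two mark hooks through CompositeMarkHook. The standalone sketch below illustrates the composite-hook pattern in isolation; apart from CompositeMarkHook itself, the names are hypothetical and the TinyBloomFilter step is omitted.

#include <cstdio>

// Hypothetical stand-ins for JITStubRoutineSet and DFGCodeBlocks.
struct StubRoutineHook { void mark(void* p) { std::printf("stub hook saw %p\n", p); } };
struct CodeBlockHook   { void mark(void* p) { std::printf("code-block hook saw %p\n", p); } };

template<typename T, typename U>
class CompositeMarkHook {
public:
    CompositeMarkHook(T& first, U& second) : m_first(first), m_second(second) { }
    void mark(void* address) { m_first.mark(address); m_second.mark(address); }
private:
    T& m_first;
    U& m_second;
};

// Rough analogue of genericAddSpan(): walk a span of pointer-sized words and
// hand every candidate to the hook (the real code also runs a Bloom-filter check).
template<typename MarkHook>
static void scanSpan(void** begin, void** end, MarkHook& hook)
{
    for (void** it = begin; it != end; ++it)
        hook.mark(*it);
}

int main()
{
    int probe = 0;
    void* fakeStack[] = { &probe, nullptr };
    StubRoutineHook stubs;
    CodeBlockHook blocks;
    CompositeMarkHook<StubRoutineHook, CodeBlockHook> hook(stubs, blocks);
    scanSpan(fakeStack, fakeStack + 2, hook);   // each word reaches both hooks
    return 0;
}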
diff --git a/Source/JavaScriptCore/heap/ConservativeRoots.h b/Source/JavaScriptCore/heap/ConservativeRoots.h
index 9d9e9ba0c..219bdcc8e 100644
--- a/Source/JavaScriptCore/heap/ConservativeRoots.h
+++ b/Source/JavaScriptCore/heap/ConservativeRoots.h
@@ -32,9 +32,10 @@
namespace JSC {
-class JSCell;
class DFGCodeBlocks;
class Heap;
+class JITStubRoutineSet;
+class JSCell;
class ConservativeRoots {
public:
@@ -42,7 +43,8 @@ public:
~ConservativeRoots();
void add(void* begin, void* end);
- void add(void* begin, void* end, DFGCodeBlocks&);
+ void add(void* begin, void* end, JITStubRoutineSet&);
+ void add(void* begin, void* end, JITStubRoutineSet&, DFGCodeBlocks&);
size_t size();
JSCell** roots();
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.cpp b/Source/JavaScriptCore/heap/CopiedSpace.cpp
index 631e829ec..9eb70a556 100644
--- a/Source/JavaScriptCore/heap/CopiedSpace.cpp
+++ b/Source/JavaScriptCore/heap/CopiedSpace.cpp
@@ -66,6 +66,7 @@ CheckedBoolean CopiedSpace::tryAllocateSlowCase(size_t bytes, void** outPtr)
if (isOversize(bytes))
return tryAllocateOversize(bytes, outPtr);
+ ASSERT(m_heap->globalData()->apiLock().currentThreadIsHoldingLock());
m_heap->didAllocate(m_allocator.currentCapacity());
allocateBlock();
diff --git a/Source/JavaScriptCore/heap/Heap.cpp b/Source/JavaScriptCore/heap/Heap.cpp
index ef062c9ce..377132765 100644
--- a/Source/JavaScriptCore/heap/Heap.cpp
+++ b/Source/JavaScriptCore/heap/Heap.cpp
@@ -160,15 +160,9 @@ static inline size_t proportionalHeapSize(size_t heapSize, size_t ramSize)
return 1.25 * heapSize;
}
-static inline bool isValidSharedInstanceThreadState()
+static inline bool isValidSharedInstanceThreadState(JSGlobalData* globalData)
{
- if (!JSLock::lockCount())
- return false;
-
- if (!JSLock::currentThreadIsHoldingLock())
- return false;
-
- return true;
+ return globalData->apiLock().currentThreadIsHoldingLock();
}
static inline bool isValidThreadState(JSGlobalData* globalData)
@@ -176,7 +170,7 @@ static inline bool isValidThreadState(JSGlobalData* globalData)
if (globalData->identifierTable != wtfThreadData().currentIdentifierTable())
return false;
- if (globalData->isSharedInstance() && !isValidSharedInstanceThreadState())
+ if (globalData->isSharedInstance() && !isValidSharedInstanceThreadState(globalData))
return false;
return true;
@@ -275,10 +269,6 @@ void Heap::lastChanceToFinalize()
ASSERT(!m_globalData->dynamicGlobalObject);
ASSERT(m_operationInProgress == NoOperation);
- // FIXME: Make this a release-mode crash once we're sure no one's doing this.
- if (size_t size = m_protectedValues.size())
- WTFLogAlways("ERROR: JavaScriptCore heap deallocated while %ld values were still protected", static_cast<unsigned long>(size));
-
m_objectSpace.lastChanceToFinalize();
#if ENABLE(SIMPLE_HEAP_PROFILING)
@@ -327,7 +317,7 @@ void Heap::didAbandon(size_t bytes)
void Heap::protect(JSValue k)
{
ASSERT(k);
- ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance());
+ ASSERT(m_globalData->apiLock().currentThreadIsHoldingLock());
if (!k.isCell())
return;
@@ -338,7 +328,7 @@ void Heap::protect(JSValue k)
bool Heap::unprotect(JSValue k)
{
ASSERT(k);
- ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance());
+ ASSERT(m_globalData->apiLock().currentThreadIsHoldingLock());
if (!k.isCell())
return false;
@@ -430,6 +420,7 @@ void Heap::markRoots(bool fullGC)
// We gather conservative roots before clearing mark bits because conservative
// gathering uses the mark bits to determine whether a reference is valid.
ConservativeRoots machineThreadRoots(&m_objectSpace.blocks(), &m_storageSpace);
+ m_jitStubRoutines.clearMarks();
{
GCPHASE(GatherConservativeRoots);
m_machineThreads.gatherConservativeRoots(machineThreadRoots, &dummy);
@@ -439,7 +430,8 @@ void Heap::markRoots(bool fullGC)
m_dfgCodeBlocks.clearMarks();
{
GCPHASE(GatherRegisterFileRoots);
- registerFile().gatherConservativeRoots(registerFileRoots, m_dfgCodeBlocks);
+ registerFile().gatherConservativeRoots(
+ registerFileRoots, m_jitStubRoutines, m_dfgCodeBlocks);
}
#if ENABLE(DFG_JIT)
@@ -464,6 +456,7 @@ void Heap::markRoots(bool fullGC)
m_storageSpace.startedCopying();
SlotVisitor& visitor = m_slotVisitor;
+ visitor.setup();
HeapRootVisitor heapRootVisitor(visitor);
{
@@ -549,9 +542,10 @@ void Heap::markRoots(bool fullGC)
}
{
- GCPHASE(TraceCodeBlocks);
- MARK_LOG_ROOT(visitor, "Trace Code Blocks");
+ GCPHASE(TraceCodeBlocksAndJITStubRoutines);
+ MARK_LOG_ROOT(visitor, "Trace Code Blocks and JIT Stub Routines");
m_dfgCodeBlocks.traceMarkedCodeBlocks(visitor);
+ m_jitStubRoutines.traceMarkedStubRoutines(visitor);
visitor.donateAndDrain();
}
@@ -595,12 +589,11 @@ void Heap::markRoots(bool fullGC)
#endif
visitor.reset();
- m_sharedData.reset();
#if ENABLE(PARALLEL_GC)
m_sharedData.resetChildren();
#endif
+ m_sharedData.reset();
m_storageSpace.doneCopying();
-
}
size_t Heap::objectCount()
@@ -675,6 +668,7 @@ void Heap::deleteUnmarkedCompiledCode()
}
m_dfgCodeBlocks.deleteUnmarkedJettisonedCodeBlocks();
+ m_jitStubRoutines.deleteUnmarkedJettisonedStubRoutines();
}
void Heap::collectAllGarbage()
@@ -692,6 +686,7 @@ void Heap::collect(SweepToggle sweepToggle)
SamplingRegion samplingRegion("Garbage Collection");
GCPHASE(Collect);
+ ASSERT(globalData()->apiLock().currentThreadIsHoldingLock());
ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
ASSERT(m_isSafeToCollect);
JAVASCRIPTCORE_GC_BEGIN();
@@ -777,19 +772,19 @@ void Heap::collect(SweepToggle sweepToggle)
JAVASCRIPTCORE_GC_END();
}
-void Heap::setActivityCallback(PassOwnPtr<GCActivityCallback> activityCallback)
+void Heap::setActivityCallback(GCActivityCallback* activityCallback)
{
m_activityCallback = activityCallback;
}
GCActivityCallback* Heap::activityCallback()
{
- return m_activityCallback.get();
+ return m_activityCallback;
}
IncrementalSweeper* Heap::sweeper()
{
- return m_sweeper.get();
+ return m_sweeper;
}
void Heap::setGarbageCollectionTimerEnabled(bool enable)
diff --git a/Source/JavaScriptCore/heap/Heap.h b/Source/JavaScriptCore/heap/Heap.h
index 91c3aa58f..a43be3df0 100644
--- a/Source/JavaScriptCore/heap/Heap.h
+++ b/Source/JavaScriptCore/heap/Heap.h
@@ -26,6 +26,7 @@
#include "DFGCodeBlocks.h"
#include "HandleSet.h"
#include "HandleStack.h"
+#include "JITStubRoutineSet.h"
#include "MarkedAllocator.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
@@ -44,10 +45,12 @@ namespace JSC {
class CodeBlock;
class ExecutableBase;
class GCActivityCallback;
+ class GCAwareJITStubRoutine;
class GlobalCodeBlock;
class Heap;
class HeapRootVisitor;
class IncrementalSweeper;
+ class JITStubRoutine;
class JSCell;
class JSGlobalData;
class JSValue;
@@ -99,10 +102,10 @@ namespace JSC {
MachineThreads& machineThreads() { return m_machineThreads; }
JS_EXPORT_PRIVATE GCActivityCallback* activityCallback();
- JS_EXPORT_PRIVATE void setActivityCallback(PassOwnPtr<GCActivityCallback>);
+ JS_EXPORT_PRIVATE void setActivityCallback(GCActivityCallback*);
JS_EXPORT_PRIVATE void setGarbageCollectionTimerEnabled(bool);
- IncrementalSweeper* sweeper();
+ JS_EXPORT_PRIVATE IncrementalSweeper* sweeper();
// true if an allocation or collection is in progress
inline bool isBusy();
@@ -168,6 +171,8 @@ namespace JSC {
private:
friend class CodeBlock;
+ friend class GCAwareJITStubRoutine;
+ friend class JITStubRoutine;
friend class LLIntOffsetsExtractor;
friend class MarkedSpace;
friend class MarkedAllocator;
@@ -229,6 +234,7 @@ namespace JSC {
HandleSet m_handleSet;
HandleStack m_handleStack;
DFGCodeBlocks m_dfgCodeBlocks;
+ JITStubRoutineSet m_jitStubRoutines;
FinalizerOwner m_finalizerOwner;
bool m_isSafeToCollect;
@@ -237,10 +243,10 @@ namespace JSC {
double m_lastGCLength;
double m_lastCodeDiscardTime;
- OwnPtr<GCActivityCallback> m_activityCallback;
- OwnPtr<IncrementalSweeper> m_sweeper;
-
DoublyLinkedList<ExecutableBase> m_compiledCode;
+
+ GCActivityCallback* m_activityCallback;
+ IncrementalSweeper* m_sweeper;
};
inline bool Heap::shouldCollect()
diff --git a/Source/JavaScriptCore/heap/HeapTimer.cpp b/Source/JavaScriptCore/heap/HeapTimer.cpp
index b4d928a34..ae66f9e26 100644
--- a/Source/JavaScriptCore/heap/HeapTimer.cpp
+++ b/Source/JavaScriptCore/heap/HeapTimer.cpp
@@ -26,6 +26,10 @@
#include "config.h"
#include "HeapTimer.h"
+#include "APIShims.h"
+#include "JSObject.h"
+#include "JSString.h"
+#include "ScopeChain.h"
#include <wtf/Threading.h>
namespace JSC {
@@ -46,7 +50,8 @@ HeapTimer::HeapTimer(JSGlobalData* globalData, CFRunLoopRef runLoop)
HeapTimer::~HeapTimer()
{
- invalidate();
+ CFRunLoopRemoveTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
+ CFRunLoopTimerInvalidate(m_timer.get());
}
void HeapTimer::synchronize()
@@ -60,14 +65,37 @@ void HeapTimer::synchronize()
void HeapTimer::invalidate()
{
- CFRunLoopRemoveTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
- CFRunLoopTimerInvalidate(m_timer.get());
+ m_globalData = 0;
+ CFRunLoopTimerSetNextFireDate(m_timer.get(), CFAbsoluteTimeGetCurrent() - s_decade);
+}
+
+void HeapTimer::didStartVMShutdown()
+{
+ if (CFRunLoopGetCurrent() == m_runLoop.get()) {
+ invalidate();
+ delete this;
+ return;
+ }
+ ASSERT(!m_globalData->apiLock().currentThreadIsHoldingLock());
+ MutexLocker locker(m_shutdownMutex);
+ invalidate();
}
void HeapTimer::timerDidFire(CFRunLoopTimerRef, void* info)
{
HeapTimer* agent = static_cast<HeapTimer*>(info);
- agent->doWork();
+ agent->m_shutdownMutex.lock();
+ if (!agent->m_globalData) {
+ agent->m_shutdownMutex.unlock();
+ delete agent;
+ return;
+ }
+ {
+ // We don't ref here to prevent us from resurrecting the ref count of a "dead" JSGlobalData.
+ APIEntryShim shim(agent->m_globalData, APIEntryShimWithoutLock::DontRefGlobalData);
+ agent->doWork();
+ }
+ agent->m_shutdownMutex.unlock();
}
#else
@@ -81,6 +109,11 @@ HeapTimer::~HeapTimer()
{
}
+void HeapTimer::didStartVMShutdown()
+{
+ delete this;
+}
+
void HeapTimer::synchronize()
{
}
@@ -89,7 +122,6 @@ void HeapTimer::invalidate()
{
}
-
#endif
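
Note on the HeapTimer change above: the CF run-loop teardown moves into the destructor, while invalidate() now only clears m_globalData and pushes the fire date far into the past. didStartVMShutdown() and timerDidFire() then coordinate through m_shutdownMutex so a timer whose VM has gone away deletes itself on its own run loop. The toy below reproduces just that handshake with std::mutex; it is an illustration of the locking pattern, not WebKit code, and the caller side (presumably the Heap during VM teardown) is an assumption.

#include <cstdio>
#include <mutex>

class ToyHeapTimer {
public:
    // Called by the owner when the VM is torn down. After this the timer is on
    // its own: the next fire (or, in the real code, the call itself when it is
    // already on the timer's run loop) frees it.
    void didStartVMShutdown()
    {
        std::lock_guard<std::mutex> locker(m_shutdownMutex);
        m_vmAlive = false;   // mirrors invalidate() clearing m_globalData
    }

    // Stand-in for the CFRunLoopTimer callback.
    static void timerDidFire(ToyHeapTimer* timer)
    {
        timer->m_shutdownMutex.lock();
        if (!timer->m_vmAlive) {              // VM already shut down
            timer->m_shutdownMutex.unlock();
            delete timer;                     // the timer reaps itself
            return;
        }
        std::puts("doWork() while the VM is still alive");
        timer->m_shutdownMutex.unlock();
    }

private:
    std::mutex m_shutdownMutex;
    bool m_vmAlive = true;
};

int main()
{
    ToyHeapTimer* timer = new ToyHeapTimer;
    ToyHeapTimer::timerDidFire(timer);   // VM alive: does work
    timer->didStartVMShutdown();         // owner walks away
    ToyHeapTimer::timerDidFire(timer);   // VM gone: timer deletes itself
    return 0;
}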
diff --git a/Source/JavaScriptCore/heap/HeapTimer.h b/Source/JavaScriptCore/heap/HeapTimer.h
index fea013975..9255e0648 100644
--- a/Source/JavaScriptCore/heap/HeapTimer.h
+++ b/Source/JavaScriptCore/heap/HeapTimer.h
@@ -27,6 +27,7 @@
#define HeapTimer_h
#include <wtf/RetainPtr.h>
+#include <wtf/Threading.h>
#if USE(CF)
#include <CoreFoundation/CoreFoundation.h>
@@ -46,7 +47,8 @@ public:
#endif
virtual ~HeapTimer();
-
+
+ void didStartVMShutdown();
virtual void synchronize();
virtual void doWork() = 0;
@@ -59,6 +61,8 @@ protected:
RetainPtr<CFRunLoopTimerRef> m_timer;
RetainPtr<CFRunLoopRef> m_runLoop;
CFRunLoopTimerContext m_context;
+
+ Mutex m_shutdownMutex;
#endif
private:
diff --git a/Source/JavaScriptCore/heap/IncrementalSweeper.cpp b/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
index 848377346..49222c545 100644
--- a/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
+++ b/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
@@ -45,7 +45,6 @@ static const CFTimeInterval sweepTimeMultiplier = 1.0 / sweepTimeTotal;
void IncrementalSweeper::doWork()
{
- APIEntryShim shim(m_globalData);
doSweep(WTF::monotonicallyIncreasingTime());
}
@@ -55,9 +54,9 @@ IncrementalSweeper::IncrementalSweeper(Heap* heap, CFRunLoopRef runLoop)
{
}
-PassOwnPtr<IncrementalSweeper> IncrementalSweeper::create(Heap* heap)
+IncrementalSweeper* IncrementalSweeper::create(Heap* heap)
{
- return adoptPtr(new IncrementalSweeper(heap, CFRunLoopGetCurrent()));
+ return new IncrementalSweeper(heap, CFRunLoopGetCurrent());
}
void IncrementalSweeper::scheduleTimer()
@@ -110,9 +109,9 @@ void IncrementalSweeper::doWork()
{
}
-PassOwnPtr<IncrementalSweeper> IncrementalSweeper::create(Heap* heap)
+IncrementalSweeper* IncrementalSweeper::create(Heap* heap)
{
- return adoptPtr(new IncrementalSweeper(heap->globalData()));
+ return new IncrementalSweeper(heap->globalData());
}
void IncrementalSweeper::startSweeping(const HashSet<MarkedBlock*>&)
diff --git a/Source/JavaScriptCore/heap/IncrementalSweeper.h b/Source/JavaScriptCore/heap/IncrementalSweeper.h
index 20f4e3ca8..eedfa7f6f 100644
--- a/Source/JavaScriptCore/heap/IncrementalSweeper.h
+++ b/Source/JavaScriptCore/heap/IncrementalSweeper.h
@@ -39,7 +39,7 @@ class Heap;
class IncrementalSweeper : public HeapTimer {
public:
- static PassOwnPtr<IncrementalSweeper> create(Heap*);
+ static IncrementalSweeper* create(Heap*);
void startSweeping(const HashSet<MarkedBlock*>& blockSnapshot);
virtual void doWork();
diff --git a/Source/JavaScriptCore/heap/JITStubRoutineSet.cpp b/Source/JavaScriptCore/heap/JITStubRoutineSet.cpp
new file mode 100644
index 000000000..054bf06dd
--- /dev/null
+++ b/Source/JavaScriptCore/heap/JITStubRoutineSet.cpp
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "JITStubRoutineSet.h"
+
+#if ENABLE(JIT)
+
+#include "GCAwareJITStubRoutine.h"
+#include "ScopeChain.h"
+#include "SlotVisitor.h"
+
+namespace JSC {
+
+JITStubRoutineSet::JITStubRoutineSet() { }
+JITStubRoutineSet::~JITStubRoutineSet()
+{
+ for (size_t i = m_listOfRoutines.size(); i--;) {
+ GCAwareJITStubRoutine* routine = m_listOfRoutines[i];
+
+ routine->m_mayBeExecuting = false;
+
+ if (!routine->m_isJettisoned) {
+ // Inform the deref() routine that it should delete this guy as soon
+ // as the ref count reaches zero.
+ routine->m_isJettisoned = true;
+ continue;
+ }
+
+ routine->deleteFromGC();
+ }
+}
+
+void JITStubRoutineSet::add(GCAwareJITStubRoutine* routine)
+{
+ ASSERT(!routine->m_isJettisoned);
+
+ m_listOfRoutines.append(routine);
+
+ uintptr_t start = routine->startAddress();
+ uintptr_t end = routine->endAddress();
+ uintptr_t step = JITStubRoutine::addressStep();
+ for (uintptr_t iter = start; iter < end; iter += step) {
+ ASSERT(m_addressToRoutineMap.find(iter) == m_addressToRoutineMap.end());
+ m_addressToRoutineMap.add(iter, routine);
+ }
+}
+
+void JITStubRoutineSet::clearMarks()
+{
+ for (size_t i = m_listOfRoutines.size(); i--;)
+ m_listOfRoutines[i]->m_mayBeExecuting = false;
+}
+
+void JITStubRoutineSet::markSlow(uintptr_t address)
+{
+ HashMap<uintptr_t, GCAwareJITStubRoutine*>::iterator iter =
+ m_addressToRoutineMap.find(address & ~(JITStubRoutine::addressStep() - 1));
+
+ if (iter == m_addressToRoutineMap.end())
+ return;
+
+ iter->second->m_mayBeExecuting = true;
+}
+
+void JITStubRoutineSet::deleteUnmarkedJettisonedStubRoutines()
+{
+ for (size_t i = 0; i < m_listOfRoutines.size(); i++) {
+ GCAwareJITStubRoutine* routine = m_listOfRoutines[i];
+ if (!routine->m_isJettisoned || routine->m_mayBeExecuting)
+ continue;
+
+ uintptr_t start = routine->startAddress();
+ uintptr_t end = routine->endAddress();
+ uintptr_t step = JITStubRoutine::addressStep();
+ for (uintptr_t iter = start; iter < end; iter += step) {
+ ASSERT(m_addressToRoutineMap.find(iter) != m_addressToRoutineMap.end());
+ ASSERT(m_addressToRoutineMap.find(iter)->second == routine);
+ m_addressToRoutineMap.remove(iter);
+ }
+
+ routine->deleteFromGC();
+
+ m_listOfRoutines[i] = m_listOfRoutines.last();
+ m_listOfRoutines.removeLast();
+ i--;
+ }
+}
+
+void JITStubRoutineSet::traceMarkedStubRoutines(SlotVisitor& visitor)
+{
+ for (size_t i = m_listOfRoutines.size(); i--;) {
+ GCAwareJITStubRoutine* routine = m_listOfRoutines[i];
+ if (!routine->m_mayBeExecuting)
+ continue;
+
+ routine->markRequiredObjects(visitor);
+ }
+}
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
+
diff --git a/Source/JavaScriptCore/heap/JITStubRoutineSet.h b/Source/JavaScriptCore/heap/JITStubRoutineSet.h
new file mode 100644
index 000000000..ea120132e
--- /dev/null
+++ b/Source/JavaScriptCore/heap/JITStubRoutineSet.h
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JITStubRoutineSet_h
+#define JITStubRoutineSet_h
+
+#include <wtf/Platform.h>
+
+#if ENABLE(JIT)
+
+#include "JITStubRoutine.h"
+#include <wtf/FastAllocBase.h>
+#include <wtf/HashMap.h>
+#include <wtf/Vector.h>
+
+namespace JSC {
+
+class GCAwareJITStubRoutine;
+class SlotVisitor;
+
+class JITStubRoutineSet {
+ WTF_MAKE_NONCOPYABLE(JITStubRoutineSet);
+ WTF_MAKE_FAST_ALLOCATED;
+
+public:
+ JITStubRoutineSet();
+ ~JITStubRoutineSet();
+
+ void add(GCAwareJITStubRoutine*);
+
+ void clearMarks();
+
+ void mark(void* candidateAddress)
+ {
+ uintptr_t address = reinterpret_cast<uintptr_t>(candidateAddress);
+ if (!JITStubRoutine::passesFilter(address))
+ return;
+
+ markSlow(address);
+ }
+
+ void deleteUnmarkedJettisonedStubRoutines();
+
+ void traceMarkedStubRoutines(SlotVisitor&);
+
+private:
+ void markSlow(uintptr_t address);
+
+ HashMap<uintptr_t, GCAwareJITStubRoutine*> m_addressToRoutineMap;
+ Vector<GCAwareJITStubRoutine*> m_listOfRoutines;
+};
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
+
+#endif // JITStubRoutineSet_h
+
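
A brief note on how the two new JITStubRoutineSet files fit together: add() registers every addressStep()-aligned slot of a routine's code range in m_addressToRoutineMap, the inline mark() in the header rejects most stack words via passesFilter(), and markSlow() masks a surviving address down to the same granularity before the hash lookup. The snippet below only demonstrates that rounding arithmetic with made-up numbers; the step value and addresses are illustrative, not taken from JITStubRoutine.

#include <cstdint>
#include <cstdio>

int main()
{
    // Assumed power-of-two step, as the (step - 1) mask in markSlow() implies.
    const uintptr_t step  = 1024;
    const uintptr_t start = 0x10000400;   // made-up code range of one routine
    const uintptr_t end   = 0x10000c00;

    // add(): every step-aligned address in [start, end) maps to the routine.
    for (uintptr_t iter = start; iter < end; iter += step)
        std::printf("registered key 0x%lx\n", static_cast<unsigned long>(iter));

    // mark()/markSlow(): a raw word found on the stack is rounded down to the
    // same granularity, so any interior pointer finds its routine.
    const uintptr_t candidate = 0x10000a5c;
    std::printf("lookup key     0x%lx\n",
                static_cast<unsigned long>(candidate & ~(step - 1)));
    return 0;
}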
diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.cpp b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
index 8e0c57b6a..7eb57479b 100644
--- a/Source/JavaScriptCore/heap/MachineStackMarker.cpp
+++ b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
@@ -141,8 +141,10 @@ MachineThreads::MachineThreads(Heap* heap)
MachineThreads::~MachineThreads()
{
- if (m_threadSpecific)
- ThreadSpecificKeyDelete(m_threadSpecific);
+ if (m_threadSpecific) {
+ int error = pthread_key_delete(m_threadSpecific);
+ ASSERT_UNUSED(error, !error);
+ }
MutexLocker registeredThreadsLock(m_registeredThreadsMutex);
for (Thread* t = m_registeredThreads; t;) {
@@ -179,17 +181,19 @@ void MachineThreads::makeUsableFromMultipleThreads()
if (m_threadSpecific)
return;
- ThreadSpecificKeyCreate(&m_threadSpecific, removeThread);
+ int error = pthread_key_create(&m_threadSpecific, removeThread);
+ if (error)
+ CRASH();
}
void MachineThreads::addCurrentThread()
{
ASSERT(!m_heap->globalData()->exclusiveThread || m_heap->globalData()->exclusiveThread == currentThread());
- if (!m_threadSpecific || ThreadSpecificGet(m_threadSpecific))
+ if (!m_threadSpecific || pthread_getspecific(m_threadSpecific))
return;
- ThreadSpecificSet(m_threadSpecific, this);
+ pthread_setspecific(m_threadSpecific, this);
Thread* thread = new Thread(getCurrentPlatformThread(), wtfThreadData().stack().origin());
MutexLocker lock(m_registeredThreadsMutex);
diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.h b/Source/JavaScriptCore/heap/MachineStackMarker.h
index 2209f97e9..5c7705fcf 100644
--- a/Source/JavaScriptCore/heap/MachineStackMarker.h
+++ b/Source/JavaScriptCore/heap/MachineStackMarker.h
@@ -22,14 +22,14 @@
#ifndef MachineThreads_h
#define MachineThreads_h
+#include <pthread.h>
#include <wtf/Noncopyable.h>
-#include <wtf/ThreadSpecific.h>
#include <wtf/ThreadingPrimitives.h>
namespace JSC {
- class Heap;
class ConservativeRoots;
+ class Heap;
class MachineThreads {
WTF_MAKE_NONCOPYABLE(MachineThreads);
@@ -55,7 +55,7 @@ namespace JSC {
Heap* m_heap;
Mutex m_registeredThreadsMutex;
Thread* m_registeredThreads;
- WTF::ThreadSpecificKey m_threadSpecific;
+ pthread_key_t m_threadSpecific;
};
} // namespace JSC
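
The MachineStackMarker change replaces WTF's ThreadSpecific* wrappers with raw pthread TLS. The mechanism is standard POSIX: the destructor handed to pthread_key_create runs when a registered thread exits, which is what lets dying threads unregister themselves. A minimal sketch of that pattern follows; removeThread here is a stand-in for MachineThreads::removeThread, and the registry object is hypothetical.

#include <pthread.h>
#include <cstdio>
#include <cstdlib>

static pthread_key_t threadKey;

// Runs automatically when a thread that called pthread_setspecific() exits.
static void removeThread(void* value)
{
    std::printf("thread exiting, unregister %p\n", value);
}

static void makeUsableFromMultipleThreads()
{
    if (pthread_key_create(&threadKey, removeThread))
        std::abort();   // mirrors the CRASH() on failure in the diff
}

static void addCurrentThread(void* registry)
{
    if (pthread_getspecific(threadKey))
        return;         // this thread is already registered
    pthread_setspecific(threadKey, registry);
}

static void* threadMain(void* registry)
{
    addCurrentThread(registry);
    return nullptr;     // removeThread() fires as this thread exits
}

int main()
{
    makeUsableFromMultipleThreads();
    int registry = 0;   // stands in for the MachineThreads object
    pthread_t thread;
    pthread_create(&thread, nullptr, threadMain, &registry);
    pthread_join(thread, nullptr);
    return 0;
}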
diff --git a/Source/JavaScriptCore/heap/MarkStack.cpp b/Source/JavaScriptCore/heap/MarkStack.cpp
index 3eb02c4e8..9d9130026 100644
--- a/Source/JavaScriptCore/heap/MarkStack.cpp
+++ b/Source/JavaScriptCore/heap/MarkStack.cpp
@@ -38,6 +38,7 @@
#include "Structure.h"
#include "UString.h"
#include "WriteBarrier.h"
+#include <wtf/Atomics.h>
#include <wtf/DataLog.h>
#include <wtf/MainThread.h>
@@ -65,7 +66,7 @@ MarkStackSegment* MarkStackSegmentAllocator::allocate()
}
}
- return static_cast<MarkStackSegment*>(OSAllocator::reserveAndCommit(Options::gcMarkStackSegmentSize));
+ return static_cast<MarkStackSegment*>(OSAllocator::reserveAndCommit(Options::gcMarkStackSegmentSize()));
}
void MarkStackSegmentAllocator::release(MarkStackSegment* segment)
@@ -86,13 +87,13 @@ void MarkStackSegmentAllocator::shrinkReserve()
while (segments) {
MarkStackSegment* toFree = segments;
segments = segments->m_previous;
- OSAllocator::decommitAndRelease(toFree, Options::gcMarkStackSegmentSize);
+ OSAllocator::decommitAndRelease(toFree, Options::gcMarkStackSegmentSize());
}
}
MarkStackArray::MarkStackArray(MarkStackSegmentAllocator& allocator)
: m_allocator(allocator)
- , m_segmentCapacity(MarkStackSegment::capacityFromSize(Options::gcMarkStackSegmentSize))
+ , m_segmentCapacity(MarkStackSegment::capacityFromSize(Options::gcMarkStackSegmentSize()))
, m_top(0)
, m_numberOfPreviousSegments(0)
{
@@ -225,8 +226,8 @@ void MarkStackArray::stealSomeCellsFrom(MarkStackArray& other, size_t idleThread
void MarkStackThreadSharedData::resetChildren()
{
for (unsigned i = 0; i < m_markingThreadsMarkStack.size(); ++i)
- m_markingThreadsMarkStack[i]->reset();
-}
+ m_markingThreadsMarkStack[i]->reset();
+}
size_t MarkStackThreadSharedData::childVisitCount()
{
@@ -257,12 +258,13 @@ void MarkStackThreadSharedData::markingThreadStartFunc(void* myVisitor)
MarkStackThreadSharedData::MarkStackThreadSharedData(JSGlobalData* globalData)
: m_globalData(globalData)
, m_copiedSpace(&globalData->heap.m_storageSpace)
+ , m_shouldHashConst(false)
, m_sharedMarkStack(m_segmentAllocator)
, m_numberOfActiveParallelMarkers(0)
, m_parallelMarkersShouldExit(false)
{
#if ENABLE(PARALLEL_GC)
- for (unsigned i = 1; i < Options::numberOfGCMarkers; ++i) {
+ for (unsigned i = 1; i < Options::numberOfGCMarkers(); ++i) {
SlotVisitor* slotVisitor = new SlotVisitor(*this);
m_markingThreadsMarkStack.append(slotVisitor);
m_markingThreads.append(createThread(markingThreadStartFunc, slotVisitor, "JavaScriptCore::Marking"));
@@ -298,6 +300,21 @@ void MarkStackThreadSharedData::reset()
ASSERT(m_opaqueRoots.isEmpty());
#endif
m_weakReferenceHarvesters.removeAll();
+
+ if (m_shouldHashConst) {
+ m_globalData->resetNewStringsSinceLastHashConst();
+ m_shouldHashConst = false;
+ }
+}
+
+void MarkStack::setup()
+{
+ m_shared.m_shouldHashConst = m_shared.m_globalData->haveEnoughNewStringsToHashConst();
+ m_shouldHashConst = m_shared.m_shouldHashConst;
+#if ENABLE(PARALLEL_GC)
+ for (unsigned i = 0; i < m_shared.m_markingThreadsMarkStack.size(); ++i)
+ m_shared.m_markingThreadsMarkStack[i]->m_shouldHashConst = m_shared.m_shouldHashConst;
+#endif
}
void MarkStack::reset()
@@ -309,6 +326,10 @@ void MarkStack::reset()
#else
m_opaqueRoots.clear();
#endif
+ if (m_shouldHashConst) {
+ m_uniqueStrings.clear();
+ m_shouldHashConst = false;
+ }
}
void MarkStack::append(ConservativeRoots& conservativeRoots)
@@ -333,7 +354,7 @@ ALWAYS_INLINE static void visitChildren(SlotVisitor& visitor, const JSCell* cell
}
if (isJSFinalObject(cell)) {
- JSObject::visitChildren(const_cast<JSCell*>(cell), visitor);
+ JSFinalObject::visitChildren(const_cast<JSCell*>(cell), visitor);
return;
}
@@ -368,7 +389,7 @@ void SlotVisitor::donateKnownParallel()
// Otherwise, assume that a thread will go idle soon, and donate.
m_stack.donateSomeCellsTo(m_shared.m_sharedMarkStack);
- if (m_shared.m_numberOfActiveParallelMarkers < Options::numberOfGCMarkers)
+ if (m_shared.m_numberOfActiveParallelMarkers < Options::numberOfGCMarkers())
m_shared.m_markingCondition.broadcast();
}
@@ -377,10 +398,10 @@ void SlotVisitor::drain()
ASSERT(m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
- if (Options::numberOfGCMarkers > 1) {
+ if (Options::numberOfGCMarkers() > 1) {
while (!m_stack.isEmpty()) {
m_stack.refill();
- for (unsigned countdown = Options::minimumNumberOfScansBetweenRebalance; m_stack.canRemoveLast() && countdown--;)
+ for (unsigned countdown = Options::minimumNumberOfScansBetweenRebalance(); m_stack.canRemoveLast() && countdown--;)
visitChildren(*this, m_stack.removeLast());
donateKnownParallel();
}
@@ -401,14 +422,14 @@ void SlotVisitor::drainFromShared(SharedDrainMode sharedDrainMode)
{
ASSERT(m_isInParallelMode);
- ASSERT(Options::numberOfGCMarkers);
+ ASSERT(Options::numberOfGCMarkers());
bool shouldBeParallel;
#if ENABLE(PARALLEL_GC)
- shouldBeParallel = Options::numberOfGCMarkers > 1;
+ shouldBeParallel = Options::numberOfGCMarkers() > 1;
#else
- ASSERT(Options::numberOfGCMarkers == 1);
+ ASSERT(Options::numberOfGCMarkers() == 1);
shouldBeParallel = false;
#endif
@@ -469,7 +490,7 @@ void SlotVisitor::drainFromShared(SharedDrainMode sharedDrainMode)
}
}
- size_t idleThreadCount = Options::numberOfGCMarkers - m_shared.m_numberOfActiveParallelMarkers;
+ size_t idleThreadCount = Options::numberOfGCMarkers() - m_shared.m_numberOfActiveParallelMarkers;
m_stack.stealSomeCellsFrom(m_shared.m_sharedMarkStack, idleThreadCount);
m_shared.m_numberOfActiveParallelMarkers++;
}
@@ -521,6 +542,79 @@ void* SlotVisitor::allocateNewSpace(void* ptr, size_t bytes)
return CopiedSpace::allocateFromBlock(m_copyBlock, bytes);
}
+ALWAYS_INLINE bool JSString::tryHashConstLock()
+{
+#if ENABLE(PARALLEL_GC)
+ unsigned currentFlags = m_flags;
+
+ if (currentFlags & HashConstLock)
+ return false;
+
+ unsigned newFlags = currentFlags | HashConstLock;
+
+ if (!WTF::weakCompareAndSwap(&m_flags, currentFlags, newFlags))
+ return false;
+
+ WTF::memoryBarrierAfterLock();
+ return true;
+#else
+ if (isHashConstSingleton())
+ return false;
+
+ m_flags |= HashConstLock;
+
+ return true;
+#endif
+}
+
+ALWAYS_INLINE void JSString::releaseHashConstLock()
+{
+#if ENABLE(PARALLEL_GC)
+ WTF::memoryBarrierBeforeUnlock();
+#endif
+ m_flags &= ~HashConstLock;
+}
+
+ALWAYS_INLINE bool JSString::shouldTryHashConst()
+{
+ return ((length() > 1) && !isRope() && !isHashConstSingleton());
+}
+
+ALWAYS_INLINE void MarkStack::internalAppend(JSValue* slot)
+{
+ // This internalAppend is only intended for visits to object and array backing stores,
+ // as it can change the JSValue pointed to by the argument when the original JSValue
+ // is a string that contains the same contents as another string.
+
+ ASSERT(slot);
+ JSValue value = *slot;
+ ASSERT(value);
+ if (!value.isCell())
+ return;
+
+ JSCell* cell = value.asCell();
+
+ if (m_shouldHashConst && cell->isString()) {
+ JSString* string = jsCast<JSString*>(cell);
+ if (string->shouldTryHashConst() && string->tryHashConstLock()) {
+ UniqueStringMap::AddResult addResult = m_uniqueStrings.add(string->string().impl(), value);
+ if (addResult.isNewEntry)
+ string->setHashConstSingleton();
+ else {
+ JSValue existingJSValue = addResult.iterator->second;
+ if (value != existingJSValue)
+ jsCast<JSString*>(existingJSValue.asCell())->clearHashConstSingleton();
+ *slot = existingJSValue;
+ string->releaseHashConstLock();
+ return;
+ }
+ string->releaseHashConstLock();
+ }
+ }
+
+ internalAppend(cell);
+}
+
void SlotVisitor::copyAndAppend(void** ptr, size_t bytes, JSValue* values, unsigned length)
{
void* oldPtr = *ptr;
@@ -534,7 +628,7 @@ void SlotVisitor::copyAndAppend(void** ptr, size_t bytes, JSValue* values, unsig
newValues[i] = value;
if (!value)
continue;
- internalAppend(value);
+ internalAppend(&newValues[i]);
}
memcpy(newPtr, oldPtr, jsValuesOffset);
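
The largest behavioral change in MarkStack.cpp is the opportunistic string deduplication ("hash-const") pass: copyAndAppend now calls the slot-taking internalAppend, which may rewrite the visited JSValue slot to an equal string seen earlier in the same GC, recorded in the per-visitor m_uniqueStrings map and protected by a per-string lock bit under parallel GC. The toy below mirrors only the slot-rewriting idea in plain C++; the types and the std::map are simplifications, not the JSC data structures.

#include <cstdio>
#include <map>
#include <string>

struct ToyString { std::string contents; };

// The first string seen with given contents becomes the singleton; later equal
// strings have their slots redirected to it (locking and GC marking omitted).
static void hashConst(ToyString** slot, std::map<std::string, ToyString*>& uniqueStrings)
{
    auto result = uniqueStrings.insert({(*slot)->contents, *slot});
    if (!result.second)
        *slot = result.first->second;   // redirect to the existing singleton
}

int main()
{
    std::map<std::string, ToyString*> uniqueStrings;
    ToyString a{"shared contents"}, b{"shared contents"};
    ToyString* slotA = &a;
    ToyString* slotB = &b;
    hashConst(&slotA, uniqueStrings);
    hashConst(&slotB, uniqueStrings);
    std::printf("deduplicated: %s\n", slotA == slotB ? "yes" : "no");
    return 0;
}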
diff --git a/Source/JavaScriptCore/heap/MarkStack.h b/Source/JavaScriptCore/heap/MarkStack.h
index c3065e7d6..ff25531a4 100644
--- a/Source/JavaScriptCore/heap/MarkStack.h
+++ b/Source/JavaScriptCore/heap/MarkStack.h
@@ -219,6 +219,8 @@ namespace JSC {
MarkStackSegmentAllocator m_segmentAllocator;
+ bool m_shouldHashConst;
+
Vector<ThreadIdentifier> m_markingThreads;
Vector<MarkStack*> m_markingThreadsMarkStack;
@@ -259,6 +261,7 @@ namespace JSC {
MarkStackThreadSharedData& sharedData() { return m_shared; }
bool isEmpty() { return m_stack.isEmpty(); }
+ void setup();
void reset();
size_t visitCount() const { return m_visitCount; }
@@ -292,6 +295,7 @@ namespace JSC {
void internalAppend(JSCell*);
void internalAppend(JSValue);
+ void internalAppend(JSValue*);
JS_EXPORT_PRIVATE void mergeOpaqueRoots();
@@ -304,7 +308,7 @@ namespace JSC {
void mergeOpaqueRootsIfProfitable()
{
- if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold)
+ if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold())
return;
mergeOpaqueRoots();
}
@@ -325,6 +329,10 @@ namespace JSC {
MarkStackThreadSharedData& m_shared;
+ bool m_shouldHashConst; // Local per-thread copy of shared flag for performance reasons
+ typedef HashMap<StringImpl*, JSValue> UniqueStringMap;
+ UniqueStringMap m_uniqueStrings;
+
#if ENABLE(OBJECT_MARK_LOGGING)
unsigned m_logChildCount;
#endif
@@ -339,6 +347,7 @@ namespace JSC {
, m_visitCount(0)
, m_isInParallelMode(false)
, m_shared(shared)
+ , m_shouldHashConst(false)
{
}
@@ -350,7 +359,7 @@ namespace JSC {
inline void MarkStack::addOpaqueRoot(void* root)
{
#if ENABLE(PARALLEL_GC)
- if (Options::numberOfGCMarkers == 1) {
+ if (Options::numberOfGCMarkers() == 1) {
// Put directly into the shared HashSet.
m_shared.m_opaqueRoots.add(root);
return;
diff --git a/Source/JavaScriptCore/heap/MarkedAllocator.cpp b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
index 9cac906a1..972728637 100644
--- a/Source/JavaScriptCore/heap/MarkedAllocator.cpp
+++ b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
@@ -3,6 +3,7 @@
#include "GCActivityCallback.h"
#include "Heap.h"
+#include "JSGlobalData.h"
#include <wtf/CurrentTime.h>
namespace JSC {
@@ -56,6 +57,7 @@ inline void* MarkedAllocator::tryAllocate()
void* MarkedAllocator::allocateSlowCase()
{
+ ASSERT(m_heap->globalData()->apiLock().currentThreadIsHoldingLock());
#if COLLECT_ON_EVERY_ALLOCATION
m_heap->collectAllGarbage();
ASSERT(m_heap->m_operationInProgress == NoOperation);
diff --git a/Source/JavaScriptCore/heap/SlotVisitor.h b/Source/JavaScriptCore/heap/SlotVisitor.h
index 715e2008c..70d68bb04 100644
--- a/Source/JavaScriptCore/heap/SlotVisitor.h
+++ b/Source/JavaScriptCore/heap/SlotVisitor.h
@@ -41,7 +41,7 @@ public:
void donate()
{
ASSERT(m_isInParallelMode);
- if (Options::numberOfGCMarkers == 1)
+ if (Options::numberOfGCMarkers() == 1)
return;
donateKnownParallel();
diff --git a/Source/JavaScriptCore/heap/WeakBlock.cpp b/Source/JavaScriptCore/heap/WeakBlock.cpp
index 8900e73df..05a44ea7e 100644
--- a/Source/JavaScriptCore/heap/WeakBlock.cpp
+++ b/Source/JavaScriptCore/heap/WeakBlock.cpp
@@ -127,8 +127,10 @@ void WeakBlock::reap()
if (weakImpl->state() > WeakImpl::Dead)
continue;
- if (Heap::isMarked(weakImpl->jsValue().asCell()))
+ if (Heap::isMarked(weakImpl->jsValue().asCell())) {
+ ASSERT(weakImpl->state() == WeakImpl::Live);
continue;
+ }
weakImpl->setState(WeakImpl::Dead);
}