author     Simon Hausmann <simon.hausmann@nokia.com>  2012-05-25 15:09:11 +0200
committer  Simon Hausmann <simon.hausmann@nokia.com>  2012-05-25 15:09:11 +0200
commit     a89b2ebb8e192c5e8cea21079bda2ee2c0c7dddd (patch)
tree       b7abd9f49ae1d4d2e426a5883bfccd42b8e2ee12 /Source/JavaScriptCore/heap
parent     8d473cf9743f1d30a16a27114e93bd5af5648d23 (diff)
download   qtwebkit-a89b2ebb8e192c5e8cea21079bda2ee2c0c7dddd.tar.gz
Imported WebKit commit eb5c1b8fe4d4b1b90b5137433fc58a91da0e6878 (http://svn.webkit.org/repository/webkit/trunk@118516)
Diffstat (limited to 'Source/JavaScriptCore/heap')
-rw-r--r--  Source/JavaScriptCore/heap/BlockAllocator.cpp | 17
-rw-r--r--  Source/JavaScriptCore/heap/BlockAllocator.h | 30
-rw-r--r--  Source/JavaScriptCore/heap/CopiedBlock.h | 51
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpace.cpp | 65
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpace.h | 8
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h | 20
-rw-r--r--  Source/JavaScriptCore/heap/Heap.cpp | 111
-rw-r--r--  Source/JavaScriptCore/heap/Heap.h | 18
-rw-r--r--  Source/JavaScriptCore/heap/HeapBlock.h | 4
-rw-r--r--  Source/JavaScriptCore/heap/MachineStackMarker.cpp | 14
-rw-r--r--  Source/JavaScriptCore/heap/MarkedAllocator.cpp | 10
-rw-r--r--  Source/JavaScriptCore/heap/MarkedBlock.cpp | 20
-rw-r--r--  Source/JavaScriptCore/heap/MarkedBlock.h | 70
-rw-r--r--  Source/JavaScriptCore/heap/MarkedSpace.cpp | 130
-rw-r--r--  Source/JavaScriptCore/heap/MarkedSpace.h | 7
-rw-r--r--  Source/JavaScriptCore/heap/PassWeak.h | 4
-rw-r--r--  Source/JavaScriptCore/heap/Weak.h | 29
-rw-r--r--  Source/JavaScriptCore/heap/WeakBlock.cpp | 25
-rw-r--r--  Source/JavaScriptCore/heap/WeakBlock.h | 25
-rw-r--r--  Source/JavaScriptCore/heap/WeakSet.cpp | 37
-rw-r--r--  Source/JavaScriptCore/heap/WeakSet.h | 66
-rw-r--r--  Source/JavaScriptCore/heap/WeakSetInlines.h | 2
22 files changed, 470 insertions, 293 deletions
diff --git a/Source/JavaScriptCore/heap/BlockAllocator.cpp b/Source/JavaScriptCore/heap/BlockAllocator.cpp
index ce6024079..485ec8dd1 100644
--- a/Source/JavaScriptCore/heap/BlockAllocator.cpp
+++ b/Source/JavaScriptCore/heap/BlockAllocator.cpp
@@ -26,7 +26,6 @@
#include "config.h"
#include "BlockAllocator.h"
-#include "MarkedBlock.h"
#include <wtf/CurrentTime.h>
namespace JSC {
@@ -54,14 +53,13 @@ BlockAllocator::~BlockAllocator()
void BlockAllocator::releaseFreeBlocks()
{
while (true) {
- MarkedBlock* block;
+ HeapBlock* block;
{
MutexLocker locker(m_freeBlockLock);
if (!m_numberOfFreeBlocks)
block = 0;
else {
- // FIXME: How do we know this is a MarkedBlock? It could be a CopiedBlock.
- block = static_cast<MarkedBlock*>(m_freeBlocks.removeHead());
+ block = m_freeBlocks.removeHead();
ASSERT(block);
m_numberOfFreeBlocks--;
}
@@ -69,8 +67,8 @@ void BlockAllocator::releaseFreeBlocks()
if (!block)
break;
-
- MarkedBlock::destroy(block);
+
+ block->m_allocation.deallocate();
}
}
@@ -120,14 +118,13 @@ void BlockAllocator::blockFreeingThreadMain()
size_t desiredNumberOfFreeBlocks = currentNumberOfFreeBlocks / 2;
while (!m_blockFreeingThreadShouldQuit) {
- MarkedBlock* block;
+ HeapBlock* block;
{
MutexLocker locker(m_freeBlockLock);
if (m_numberOfFreeBlocks <= desiredNumberOfFreeBlocks)
block = 0;
else {
- // FIXME: How do we know this is a MarkedBlock? It could be a CopiedBlock.
- block = static_cast<MarkedBlock*>(m_freeBlocks.removeHead());
+ block = m_freeBlocks.removeHead();
ASSERT(block);
m_numberOfFreeBlocks--;
}
@@ -136,7 +133,7 @@ void BlockAllocator::blockFreeingThreadMain()
if (!block)
break;
- MarkedBlock::destroy(block);
+ block->m_allocation.deallocate();
}
}
}
diff --git a/Source/JavaScriptCore/heap/BlockAllocator.h b/Source/JavaScriptCore/heap/BlockAllocator.h
index cc9557f85..846bdfa2a 100644
--- a/Source/JavaScriptCore/heap/BlockAllocator.h
+++ b/Source/JavaScriptCore/heap/BlockAllocator.h
@@ -26,14 +26,14 @@
#ifndef BlockAllocator_h
#define BlockAllocator_h
+#include "HeapBlock.h"
#include <wtf/DoublyLinkedList.h>
#include <wtf/Forward.h>
+#include <wtf/PageAllocationAligned.h>
#include <wtf/Threading.h>
namespace JSC {
-class HeapBlock;
-
// Simple allocator to reduce VM cost by holding onto blocks of memory for
// short periods of time and then freeing them on a secondary thread.
@@ -42,8 +42,8 @@ public:
BlockAllocator();
~BlockAllocator();
- HeapBlock* allocate();
- void deallocate(HeapBlock*);
+ PageAllocationAligned allocate();
+ void deallocate(PageAllocationAligned);
private:
void waitForRelativeTimeWhileHoldingLock(double relative);
@@ -63,24 +63,28 @@ private:
ThreadIdentifier m_blockFreeingThread;
};
-inline HeapBlock* BlockAllocator::allocate()
+inline PageAllocationAligned BlockAllocator::allocate()
{
MutexLocker locker(m_freeBlockLock);
m_isCurrentlyAllocating = true;
- if (!m_numberOfFreeBlocks) {
- ASSERT(m_freeBlocks.isEmpty());
- return 0;
+ if (m_numberOfFreeBlocks) {
+ ASSERT(!m_freeBlocks.isEmpty());
+ m_numberOfFreeBlocks--;
+ return m_freeBlocks.removeHead()->m_allocation;
}
- ASSERT(!m_freeBlocks.isEmpty());
- m_numberOfFreeBlocks--;
- return m_freeBlocks.removeHead();
+ ASSERT(m_freeBlocks.isEmpty());
+ PageAllocationAligned allocation = PageAllocationAligned::allocate(HeapBlock::s_blockSize, HeapBlock::s_blockSize, OSAllocator::JSGCHeapPages);
+ if (!static_cast<bool>(allocation))
+ CRASH();
+ return allocation;
}
-inline void BlockAllocator::deallocate(HeapBlock* block)
+inline void BlockAllocator::deallocate(PageAllocationAligned allocation)
{
MutexLocker locker(m_freeBlockLock);
- m_freeBlocks.push(block);
+ HeapBlock* heapBlock = new(NotNull, allocation.base()) HeapBlock(allocation);
+ m_freeBlocks.push(heapBlock);
m_numberOfFreeBlocks++;
}
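
Note on the BlockAllocator change above: allocate() now hands out a raw PageAllocationAligned, and callers placement-new their own block type into it, while deallocate() threads a HeapBlock header back through the freed region so it can sit on the free list. A minimal standalone sketch of that recycling idiom, not part of the patch; std::aligned_alloc, kBlockSize and the class names below are illustrative stand-ins for PageAllocationAligned and the real types:

    #include <cstddef>
    #include <cstdlib>
    #include <mutex>
    #include <new>

    static const std::size_t kBlockSize = 64 * 1024;  // stand-in for HeapBlock::s_blockSize

    struct FreeBlock { FreeBlock* next; };            // header placement-newed into a cached region

    class SimpleBlockCache {
    public:
        void* allocate()
        {
            {
                std::lock_guard<std::mutex> lock(m_mutex);
                if (m_head) {
                    FreeBlock* block = m_head;
                    m_head = block->next;
                    block->~FreeBlock();
                    return block;                     // caller placement-news its real block type here
                }
            }
            void* region = std::aligned_alloc(kBlockSize, kBlockSize);
            if (!region)
                std::abort();                         // mirrors the CRASH() on allocation failure
            return region;
        }

        void deallocate(void* region)
        {
            std::lock_guard<std::mutex> lock(m_mutex);
            m_head = new (region) FreeBlock{ m_head }; // reuse the freed region itself as the list node
        }

    private:
        std::mutex m_mutex;
        FreeBlock* m_head = nullptr;
    };
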
diff --git a/Source/JavaScriptCore/heap/CopiedBlock.h b/Source/JavaScriptCore/heap/CopiedBlock.h
index 431b86c38..b408aa40b 100644
--- a/Source/JavaScriptCore/heap/CopiedBlock.h
+++ b/Source/JavaScriptCore/heap/CopiedBlock.h
@@ -38,32 +38,51 @@ class CopiedBlock : public HeapBlock {
friend class CopiedSpace;
friend class CopiedAllocator;
public:
- CopiedBlock(PageAllocationAligned& allocation)
- : HeapBlock(allocation)
- , m_offset(payload())
- , m_isPinned(false)
- {
- ASSERT(is8ByteAligned(static_cast<void*>(m_offset)));
-#if USE(JSVALUE64)
- char* offset = static_cast<char*>(m_offset);
- memset(static_cast<void*>(offset), 0, static_cast<size_t>((reinterpret_cast<char*>(this) + allocation.size()) - offset));
-#else
- JSValue emptyValue;
- JSValue* limit = reinterpret_cast_ptr<JSValue*>(reinterpret_cast<char*>(this) + allocation.size());
- for (JSValue* currentValue = reinterpret_cast<JSValue*>(m_offset); currentValue < limit; currentValue++)
- *currentValue = emptyValue;
-#endif
- }
+ static CopiedBlock* create(const PageAllocationAligned&);
+ static PageAllocationAligned destroy(CopiedBlock*);
char* payload();
size_t size();
size_t capacity();
private:
+ CopiedBlock(const PageAllocationAligned&);
+
void* m_offset;
uintptr_t m_isPinned;
};
+inline CopiedBlock* CopiedBlock::create(const PageAllocationAligned& allocation)
+{
+ return new(NotNull, allocation.base()) CopiedBlock(allocation);
+}
+
+inline PageAllocationAligned CopiedBlock::destroy(CopiedBlock* block)
+{
+ PageAllocationAligned allocation;
+ swap(allocation, block->m_allocation);
+
+ block->~CopiedBlock();
+ return allocation;
+}
+
+inline CopiedBlock::CopiedBlock(const PageAllocationAligned& allocation)
+ : HeapBlock(allocation)
+ , m_offset(payload())
+ , m_isPinned(false)
+{
+ ASSERT(is8ByteAligned(static_cast<void*>(m_offset)));
+#if USE(JSVALUE64)
+ char* offset = static_cast<char*>(m_offset);
+ memset(static_cast<void*>(offset), 0, static_cast<size_t>((reinterpret_cast<char*>(this) + allocation.size()) - offset));
+#else
+ JSValue emptyValue;
+ JSValue* limit = reinterpret_cast_ptr<JSValue*>(reinterpret_cast<char*>(this) + allocation.size());
+ for (JSValue* currentValue = reinterpret_cast<JSValue*>(m_offset); currentValue < limit; currentValue++)
+ *currentValue = emptyValue;
+#endif
+}
+
inline char* CopiedBlock::payload()
{
return reinterpret_cast<char*>(this) + ((sizeof(CopiedBlock) + 7) & ~7);
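
The create()/destroy() pair introduced above (and the matching one added to MarkedBlock further down) replaces construct-in-place plus destroy-by-deallocate: destroy() now hands the page allocation back to the caller, which can recycle it through the BlockAllocator instead of unmapping it. A rough sketch of the idiom with stand-in types, not part of the patch:

    #include <cstddef>
    #include <new>
    #include <utility>

    struct Region {                        // simplified stand-in for WTF::PageAllocationAligned
        void* base = nullptr;
        std::size_t size = 0;
        void deallocate() { /* would unmap base in the real thing */ }
    };

    class Block {                          // simplified stand-in for CopiedBlock / MarkedBlock
    public:
        static Block* create(const Region& region)
        {
            return new (region.base) Block(region);   // block header lives at the front of its region
        }

        static Region destroy(Block* block)
        {
            Region region;
            std::swap(region, block->m_region);       // steal the region before destroying the header
            block->~Block();
            return region;                            // caller recycles it or calls deallocate()
        }

    private:
        explicit Block(const Region& region) : m_region(region) { }
        Region m_region;
    };
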
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.cpp b/Source/JavaScriptCore/heap/CopiedSpace.cpp
index d52c4e756..7f5a665df 100644
--- a/Source/JavaScriptCore/heap/CopiedSpace.cpp
+++ b/Source/JavaScriptCore/heap/CopiedSpace.cpp
@@ -40,6 +40,18 @@ CopiedSpace::CopiedSpace(Heap* heap)
{
}
+CopiedSpace::~CopiedSpace()
+{
+ while (!m_toSpace->isEmpty())
+ m_heap->blockAllocator().deallocate(CopiedBlock::destroy(static_cast<CopiedBlock*>(m_toSpace->removeHead())));
+
+ while (!m_fromSpace->isEmpty())
+ m_heap->blockAllocator().deallocate(CopiedBlock::destroy(static_cast<CopiedBlock*>(m_fromSpace->removeHead())));
+
+ while (!m_oversizeBlocks.isEmpty())
+ CopiedBlock::destroy(static_cast<CopiedBlock*>(m_oversizeBlocks.removeHead())).deallocate();
+}
+
void CopiedSpace::init()
{
m_toSpace = &m_blocks1;
@@ -77,9 +89,10 @@ CheckedBoolean CopiedSpace::tryAllocateOversize(size_t bytes, void** outPtr)
return false;
}
- CopiedBlock* block = new (NotNull, allocation.base()) CopiedBlock(allocation);
+ CopiedBlock* block = CopiedBlock::create(allocation);
m_oversizeBlocks.push(block);
- m_oversizeFilter.add(reinterpret_cast<Bits>(block));
+ m_blockFilter.add(reinterpret_cast<Bits>(block));
+ m_blockSet.add(block);
*outPtr = allocateFromBlock(block, bytes);
@@ -135,7 +148,8 @@ CheckedBoolean CopiedSpace::tryReallocateOversize(void** ptr, size_t oldSize, si
if (isOversize(oldSize)) {
CopiedBlock* oldBlock = oversizeBlockFor(oldPtr);
m_oversizeBlocks.remove(oldBlock);
- oldBlock->m_allocation.deallocate();
+ m_blockSet.remove(oldBlock);
+ CopiedBlock::destroy(oldBlock).deallocate();
}
*ptr = newPtr;
@@ -156,8 +170,8 @@ void CopiedSpace::doneFillingBlock(CopiedBlock* block)
{
MutexLocker locker(m_toSpaceLock);
m_toSpace->push(block);
- m_toSpaceSet.add(block);
- m_toSpaceFilter.add(reinterpret_cast<Bits>(block));
+ m_blockSet.add(block);
+ m_blockFilter.add(reinterpret_cast<Bits>(block));
}
{
@@ -183,15 +197,15 @@ void CopiedSpace::doneCopying()
CopiedBlock* block = static_cast<CopiedBlock*>(m_fromSpace->removeHead());
if (block->m_isPinned) {
block->m_isPinned = false;
- // We don't add the block to the toSpaceSet because it was never removed.
- ASSERT(m_toSpaceSet.contains(block));
- m_toSpaceFilter.add(reinterpret_cast<Bits>(block));
+ // We don't add the block to the blockSet because it was never removed.
+ ASSERT(m_blockSet.contains(block));
+ m_blockFilter.add(reinterpret_cast<Bits>(block));
m_toSpace->push(block);
continue;
}
- m_toSpaceSet.remove(block);
- m_heap->blockAllocator().deallocate(block);
+ m_blockSet.remove(block);
+ m_heap->blockAllocator().deallocate(CopiedBlock::destroy(block));
}
CopiedBlock* curr = static_cast<CopiedBlock*>(m_oversizeBlocks.head());
@@ -199,9 +213,12 @@ void CopiedSpace::doneCopying()
CopiedBlock* next = static_cast<CopiedBlock*>(curr->next());
if (!curr->m_isPinned) {
m_oversizeBlocks.remove(curr);
- curr->m_allocation.deallocate();
- } else
+ m_blockSet.remove(curr);
+ CopiedBlock::destroy(curr).deallocate();
+ } else {
+ m_blockFilter.add(reinterpret_cast<Bits>(curr));
curr->m_isPinned = false;
+ }
curr = next;
}
@@ -215,15 +232,9 @@ void CopiedSpace::doneCopying()
CheckedBoolean CopiedSpace::getFreshBlock(AllocationEffort allocationEffort, CopiedBlock** outBlock)
{
CopiedBlock* block = 0;
- if (allocationEffort == AllocationMustSucceed) {
- if (HeapBlock* heapBlock = m_heap->blockAllocator().allocate())
- block = new (NotNull, heapBlock) CopiedBlock(heapBlock->m_allocation);
- else if (!allocateNewBlock(&block)) {
- *outBlock = 0;
- ASSERT_NOT_REACHED();
- return false;
- }
- } else {
+ if (allocationEffort == AllocationMustSucceed)
+ block = CopiedBlock::create(m_heap->blockAllocator().allocate());
+ else {
ASSERT(allocationEffort == AllocationCanFail);
if (m_heap->shouldCollect())
m_heap->collect(Heap::DoNotSweep);
@@ -240,18 +251,6 @@ CheckedBoolean CopiedSpace::getFreshBlock(AllocationEffort allocationEffort, Cop
return true;
}
-void CopiedSpace::freeAllBlocks()
-{
- while (!m_toSpace->isEmpty())
- m_heap->blockAllocator().deallocate(m_toSpace->removeHead());
-
- while (!m_fromSpace->isEmpty())
- m_heap->blockAllocator().deallocate(m_fromSpace->removeHead());
-
- while (!m_oversizeBlocks.isEmpty())
- m_oversizeBlocks.removeHead()->m_allocation.deallocate();
-}
-
size_t CopiedSpace::size()
{
size_t calculatedSize = 0;
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.h b/Source/JavaScriptCore/heap/CopiedSpace.h
index d3cc040a5..27011781d 100644
--- a/Source/JavaScriptCore/heap/CopiedSpace.h
+++ b/Source/JavaScriptCore/heap/CopiedSpace.h
@@ -50,6 +50,7 @@ class CopiedSpace {
friend class JIT;
public:
CopiedSpace(Heap*);
+ ~CopiedSpace();
void init();
CheckedBoolean tryAllocate(size_t, void**);
@@ -64,12 +65,12 @@ public:
void pin(CopiedBlock*);
bool isPinned(void*);
+ bool contains(CopiedBlock*);
bool contains(void*, CopiedBlock*&);
size_t size();
size_t capacity();
- void freeAllBlocks();
bool isPagedOut(double deadline);
static CopiedBlock* blockFor(void*);
@@ -96,9 +97,8 @@ private:
CopiedAllocator m_allocator;
- TinyBloomFilter m_toSpaceFilter;
- TinyBloomFilter m_oversizeFilter;
- HashSet<CopiedBlock*> m_toSpaceSet;
+ TinyBloomFilter m_blockFilter;
+ HashSet<CopiedBlock*> m_blockSet;
Mutex m_toSpaceLock;
diff --git a/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h b/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h
index a8e45658b..c97762598 100644
--- a/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h
+++ b/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h
@@ -35,11 +35,21 @@
namespace JSC {
+inline bool CopiedSpace::contains(CopiedBlock* block)
+{
+ return !m_blockFilter.ruleOut(reinterpret_cast<Bits>(block)) && m_blockSet.contains(block);
+}
+
inline bool CopiedSpace::contains(void* ptr, CopiedBlock*& result)
{
CopiedBlock* block = blockFor(ptr);
+ if (contains(block)) {
+ result = block;
+ return true;
+ }
+ block = oversizeBlockFor(ptr);
result = block;
- return !m_toSpaceFilter.ruleOut(reinterpret_cast<Bits>(block)) && m_toSpaceSet.contains(block);
+ return contains(block);
}
inline void CopiedSpace::pin(CopiedBlock* block)
@@ -53,7 +63,7 @@ inline void CopiedSpace::startedCopying()
m_fromSpace = m_toSpace;
m_toSpace = temp;
- m_toSpaceFilter.reset();
+ m_blockFilter.reset();
m_allocator.startedCopying();
ASSERT(!m_inCopyingPhase);
@@ -63,7 +73,7 @@ inline void CopiedSpace::startedCopying()
inline void CopiedSpace::recycleBlock(CopiedBlock* block)
{
- m_heap->blockAllocator().deallocate(block);
+ m_heap->blockAllocator().deallocate(CopiedBlock::destroy(block));
{
MutexLocker locker(m_loanedBlocksLock);
@@ -98,8 +108,8 @@ inline CheckedBoolean CopiedSpace::addNewBlock()
return false;
m_toSpace->push(block);
- m_toSpaceFilter.add(reinterpret_cast<Bits>(block));
- m_toSpaceSet.add(block);
+ m_blockFilter.add(reinterpret_cast<Bits>(block));
+ m_blockSet.add(block);
m_allocator.resetCurrentBlock(block);
return true;
}
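
Merging m_toSpaceFilter/m_oversizeFilter and m_toSpaceSet into a single m_blockFilter/m_blockSet pair keeps the same two-stage membership test: a one-word Bloom-style filter rejects most non-members without touching the hash set, and only candidates that pass the filter pay for the precise lookup. A simplified sketch of that test with stand-in types, not part of the patch:

    #include <cstdint>
    #include <unordered_set>

    class OneWordFilter {                  // simplified stand-in for JSC's TinyBloomFilter
    public:
        void add(std::uintptr_t bits) { m_bits |= bits; }
        void reset() { m_bits = 0; }

        // True means "definitely never added"; false means "possibly added, check the precise set".
        bool ruleOut(std::uintptr_t bits) const
        {
            if (!bits)
                return true;
            return (bits & m_bits) != bits;
        }

    private:
        std::uintptr_t m_bits = 0;
    };

    struct BlockDirectory {                // plays the role of the m_blockFilter / m_blockSet pair
        OneWordFilter filter;
        std::unordered_set<const void*> blocks;

        void add(const void* block)
        {
            filter.add(reinterpret_cast<std::uintptr_t>(block));
            blocks.insert(block);
        }

        bool contains(const void* block) const
        {
            std::uintptr_t bits = reinterpret_cast<std::uintptr_t>(block);
            return !filter.ruleOut(bits) && blocks.count(block);  // cheap filter first, hash set second
        }
    };
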
diff --git a/Source/JavaScriptCore/heap/Heap.cpp b/Source/JavaScriptCore/heap/Heap.cpp
index 2254b5b01..9b68fb10a 100644
--- a/Source/JavaScriptCore/heap/Heap.cpp
+++ b/Source/JavaScriptCore/heap/Heap.cpp
@@ -35,9 +35,9 @@
#include "Tracing.h"
#include "WeakSetInlines.h"
#include <algorithm>
+#include <wtf/RAMSize.h>
#include <wtf/CurrentTime.h>
-
using namespace std;
using namespace JSC;
@@ -45,14 +45,8 @@ namespace JSC {
namespace {
-#if CPU(X86) || CPU(X86_64)
-static const size_t largeHeapSize = 16 * 1024 * 1024;
-#elif PLATFORM(IOS)
-static const size_t largeHeapSize = 8 * 1024 * 1024;
-#else
-static const size_t largeHeapSize = 512 * 1024;
-#endif
-static const size_t smallHeapSize = 512 * 1024;
+static const size_t largeHeapSize = 32 * MB; // About 1.5X the average webpage.
+static const size_t smallHeapSize = 1 * MB; // Matches the FastMalloc per-thread cache.
#if ENABLE(GC_LOGGING)
#if COMPILER(CLANG)
@@ -148,14 +142,23 @@ struct GCCounter {
#define GCCOUNTER(name, value) do { } while (false)
#endif
-static size_t heapSizeForHint(HeapSize heapSize)
+static inline size_t minHeapSize(HeapType heapType, size_t ramSize)
{
- if (heapSize == LargeHeap)
- return largeHeapSize;
- ASSERT(heapSize == SmallHeap);
+ if (heapType == LargeHeap)
+ return min(largeHeapSize, ramSize / 4);
return smallHeapSize;
}
+static inline size_t proportionalHeapSize(size_t heapSize, size_t ramSize)
+{
+ // Try to stay under 1/2 RAM size to leave room for the DOM, rendering, networking, etc.
+ if (heapSize < ramSize / 4)
+ return 2 * heapSize;
+ if (heapSize < ramSize / 2)
+ return 1.5 * heapSize;
+ return 1.25 * heapSize;
+}
+
static inline bool isValidSharedInstanceThreadState()
{
if (!JSLock::lockCount())
@@ -230,9 +233,10 @@ inline PassOwnPtr<TypeCountSet> RecordType::returnValue()
} // anonymous namespace
-Heap::Heap(JSGlobalData* globalData, HeapSize heapSize)
- : m_heapSize(heapSize)
- , m_minBytesPerCycle(heapSizeForHint(heapSize))
+Heap::Heap(JSGlobalData* globalData, HeapType heapType)
+ : m_heapType(heapType)
+ , m_ramSize(ramSize())
+ , m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
, m_sizeAfterLastCollect(0)
, m_bytesAllocatedLimit(m_minBytesPerCycle)
, m_bytesAllocated(0)
@@ -240,12 +244,10 @@ Heap::Heap(JSGlobalData* globalData, HeapSize heapSize)
, m_operationInProgress(NoOperation)
, m_objectSpace(this)
, m_storageSpace(this)
- , m_markListSet(0)
, m_activityCallback(DefaultGCActivityCallback::create(this))
, m_machineThreads(this)
, m_sharedData(globalData)
, m_slotVisitor(m_sharedData)
- , m_weakSet(this)
, m_handleSet(globalData)
, m_isSafeToCollect(false)
, m_globalData(globalData)
@@ -257,13 +259,6 @@ Heap::Heap(JSGlobalData* globalData, HeapSize heapSize)
Heap::~Heap()
{
- delete m_markListSet;
-
- m_objectSpace.shrink();
- m_storageSpace.freeAllBlocks();
-
- ASSERT(!size());
- ASSERT(!capacity());
}
bool Heap::isPagedOut(double deadline)
@@ -282,11 +277,7 @@ void Heap::lastChanceToFinalize()
if (size_t size = m_protectedValues.size())
WTFLogAlways("ERROR: JavaScriptCore heap deallocated while %ld values were still protected", static_cast<unsigned long>(size));
- m_weakSet.finalizeAll();
- m_objectSpace.canonicalizeCellLivenessData();
- m_objectSpace.clearMarks();
- m_objectSpace.sweep();
- m_globalData->smallStrings.finalizeSmallStrings();
+ m_objectSpace.lastChanceToFinalize();
#if ENABLE(SIMPLE_HEAP_PROFILING)
m_slotVisitor.m_visitedTypeCounts.dump(WTF::dataFile(), "Visited Type Counts");
@@ -451,6 +442,15 @@ void Heap::markRoots(bool fullGC)
GCPHASE(GatherRegisterFileRoots);
registerFile().gatherConservativeRoots(registerFileRoots, m_dfgCodeBlocks);
}
+
+#if ENABLE(DFG_JIT)
+ ConservativeRoots scratchBufferRoots(&m_objectSpace.blocks(), &m_storageSpace);
+ {
+ GCPHASE(GatherScratchBufferRoots);
+ m_globalData->gatherConservativeRoots(scratchBufferRoots);
+ }
+#endif
+
#if ENABLE(GGC)
MarkedBlock::DirtyCellVector dirtyCells;
if (!fullGC) {
@@ -497,6 +497,13 @@ void Heap::markRoots(bool fullGC)
visitor.append(registerFileRoots);
visitor.donateAndDrain();
}
+#if ENABLE(DFG_JIT)
+ {
+ GCPHASE(VisitScratchBufferRoots);
+ visitor.append(scratchBufferRoots);
+ visitor.donateAndDrain();
+ }
+#endif
{
GCPHASE(VisitProtectedObjects);
markProtectedObjects(heapRootVisitor);
@@ -552,7 +559,7 @@ void Heap::markRoots(bool fullGC)
{
GCPHASE(VisitingLiveWeakHandles);
while (true) {
- m_weakSet.visitLiveWeakImpls(heapRootVisitor);
+ m_objectSpace.visitWeakSets(heapRootVisitor);
harvestWeakReferences();
if (visitor.isEmpty())
break;
@@ -566,11 +573,6 @@ void Heap::markRoots(bool fullGC)
}
}
- {
- GCPHASE(VisitingDeadWeakHandles);
- m_weakSet.visitDeadWeakImpls(heapRootVisitor);
- }
-
GCCOUNTER(VisitedValueCount, visitor.visitCount());
visitor.doneCopying();
@@ -674,25 +676,24 @@ void Heap::collect(SweepToggle sweepToggle)
markRoots(fullGC);
{
+ GCPHASE(ReapingWeakHandles);
+ m_objectSpace.reapWeakSets();
+ }
+
+ {
GCPHASE(FinalizeUnconditionalFinalizers);
finalizeUnconditionalFinalizers();
}
-
+
{
GCPHASE(FinalizeWeakHandles);
- m_weakSet.sweep();
+ m_objectSpace.sweepWeakSets();
m_globalData->smallStrings.finalizeSmallStrings();
}
JAVASCRIPTCORE_GC_MARKED();
{
- GCPHASE(ResetAllocators);
- m_objectSpace.resetAllocators();
- m_weakSet.resetAllocator();
- }
-
- {
GCPHASE(DeleteCodeBlocks);
m_dfgCodeBlocks.deleteUnmarkedJettisonedCodeBlocks();
}
@@ -702,19 +703,23 @@ void Heap::collect(SweepToggle sweepToggle)
GCPHASE(Sweeping);
m_objectSpace.sweep();
m_objectSpace.shrink();
- m_weakSet.shrink();
m_bytesAbandoned = 0;
}
- // To avoid pathological GC churn in large heaps, we set the new allocation
- // limit to be the current size of the heap. This heuristic
- // is a bit arbitrary. Using the current size of the heap after this
- // collection gives us a 2X multiplier, which is a 1:1 (heap size :
- // new bytes allocated) proportion, and seems to work well in benchmarks.
- size_t newSize = size();
+ {
+ GCPHASE(ResetAllocators);
+ m_objectSpace.resetAllocators();
+ }
+
+ size_t currentHeapSize = size();
if (fullGC) {
- m_sizeAfterLastCollect = newSize;
- m_bytesAllocatedLimit = max(newSize, m_minBytesPerCycle);
+ m_sizeAfterLastCollect = currentHeapSize;
+
+ // To avoid pathological GC churn in very small and very large heaps, we set
+ // the new allocation limit based on the current size of the heap, with a
+ // fixed minimum.
+ size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
+ m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
}
m_bytesAllocated = 0;
double lastGCEndTime = WTF::currentTime();
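
The new growth policy above replaces the fixed "next limit = current heap size" rule: after a full collection, the allocation limit becomes the gap between a RAM-proportional target and the current live size, never below the type-specific minimum. A worked example, not part of the patch, assuming the LargeHeap configuration and illustrative numbers:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    static const std::size_t MB = 1024 * 1024;
    static const std::size_t largeHeapSize = 32 * MB;

    static std::size_t minHeapSize(std::size_t ramSize) { return std::min(largeHeapSize, ramSize / 4); }

    static std::size_t proportionalHeapSize(std::size_t heapSize, std::size_t ramSize)
    {
        if (heapSize < ramSize / 4)
            return 2 * heapSize;       // small relative to RAM: allow doubling
        if (heapSize < ramSize / 2)
            return 1.5 * heapSize;     // mid-range: grow more cautiously
        return 1.25 * heapSize;        // already large: grow only a little
    }

    int main()
    {
        std::size_t ramSize = 512 * MB;
        std::size_t currentHeapSize = 48 * MB;   // live bytes after this collection

        std::size_t maxHeapSize = std::max(minHeapSize(ramSize),
                                           proportionalHeapSize(currentHeapSize, ramSize));
        std::size_t bytesAllocatedLimit = maxHeapSize - currentHeapSize;

        // 48 MB < 128 MB (ramSize / 4), so the target is 2 * 48 = 96 MB and the limit is 48 MB.
        std::printf("next allocation limit: %zu MB\n", bytesAllocatedLimit / MB);
        return 0;
    }
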
diff --git a/Source/JavaScriptCore/heap/Heap.h b/Source/JavaScriptCore/heap/Heap.h
index 6bf82e4a5..edfd91483 100644
--- a/Source/JavaScriptCore/heap/Heap.h
+++ b/Source/JavaScriptCore/heap/Heap.h
@@ -32,7 +32,6 @@
#include "MarkedSpace.h"
#include "SlotVisitor.h"
#include "WeakHandleOwner.h"
-#include "WeakSet.h"
#include "WriteBarrierSupport.h"
#include <wtf/HashCountedSet.h>
#include <wtf/HashSet.h>
@@ -65,8 +64,7 @@ namespace JSC {
enum OperationInProgress { NoOperation, Allocation, Collection };
- // Heap size hint.
- enum HeapSize { SmallHeap, LargeHeap };
+ enum HeapType { SmallHeap, LargeHeap };
class Heap {
WTF_MAKE_NONCOPYABLE(Heap);
@@ -90,7 +88,7 @@ namespace JSC {
static void writeBarrier(const JSCell*, JSCell*);
static uint8_t* addressOfCardFor(JSCell*);
- Heap(JSGlobalData*, HeapSize);
+ Heap(JSGlobalData*, HeapType);
~Heap();
JS_EXPORT_PRIVATE void lastChanceToFinalize();
@@ -144,12 +142,11 @@ namespace JSC {
void pushTempSortVector(Vector<ValueStringPair>*);
void popTempSortVector(Vector<ValueStringPair>*);
- HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = new HashSet<MarkedArgumentBuffer*>; return *m_markListSet; }
+ HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = adoptPtr(new HashSet<MarkedArgumentBuffer*>); return *m_markListSet; }
template<typename Functor> typename Functor::ReturnType forEachProtectedCell(Functor&);
template<typename Functor> typename Functor::ReturnType forEachProtectedCell();
- WeakSet* weakSet() { return &m_weakSet; }
HandleSet* handleSet() { return &m_handleSet; }
HandleStack* handleStack() { return &m_handleStack; }
@@ -197,7 +194,8 @@ namespace JSC {
RegisterFile& registerFile();
BlockAllocator& blockAllocator();
- const HeapSize m_heapSize;
+ const HeapType m_heapType;
+ const size_t m_ramSize;
const size_t m_minBytesPerCycle;
size_t m_sizeAfterLastCollect;
@@ -206,18 +204,17 @@ namespace JSC {
size_t m_bytesAbandoned;
OperationInProgress m_operationInProgress;
+ BlockAllocator m_blockAllocator;
MarkedSpace m_objectSpace;
CopiedSpace m_storageSpace;
- BlockAllocator m_blockAllocator;
-
#if ENABLE(SIMPLE_HEAP_PROFILING)
VTableSpectrum m_destroyedTypeCounts;
#endif
ProtectCountSet m_protectedValues;
Vector<Vector<ValueStringPair>* > m_tempSortingVectors;
- HashSet<MarkedArgumentBuffer*>* m_markListSet;
+ OwnPtr<HashSet<MarkedArgumentBuffer*> > m_markListSet;
OwnPtr<GCActivityCallback> m_activityCallback;
@@ -226,7 +223,6 @@ namespace JSC {
MarkStackThreadSharedData m_sharedData;
SlotVisitor m_slotVisitor;
- WeakSet m_weakSet;
HandleSet m_handleSet;
HandleStack m_handleStack;
DFGCodeBlocks m_dfgCodeBlocks;
diff --git a/Source/JavaScriptCore/heap/HeapBlock.h b/Source/JavaScriptCore/heap/HeapBlock.h
index 591520d2b..3cd3c6322 100644
--- a/Source/JavaScriptCore/heap/HeapBlock.h
+++ b/Source/JavaScriptCore/heap/HeapBlock.h
@@ -36,13 +36,13 @@ enum AllocationEffort { AllocationCanFail, AllocationMustSucceed };
class HeapBlock : public DoublyLinkedListNode<HeapBlock> {
public:
- HeapBlock(PageAllocationAligned& allocation)
+ HeapBlock(const PageAllocationAligned& allocation)
: DoublyLinkedListNode<HeapBlock>()
, m_prev(0)
, m_next(0)
, m_allocation(allocation)
{
- ASSERT(allocation);
+ ASSERT(m_allocation);
}
HeapBlock* m_prev;
diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.cpp b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
index affd833eb..6989047ac 100644
--- a/Source/JavaScriptCore/heap/MachineStackMarker.cpp
+++ b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
@@ -356,14 +356,20 @@ static size_t getPlatformThreadRegisters(const PlatformThread& platformThread, P
return sizeof(CONTEXT);
#elif OS(QNX)
memset(&regs, 0, sizeof(regs));
- regs.tid = pthread_self();
- int fd = open("/proc/self", O_RDONLY);
+ regs.tid = platformThread;
+ // FIXME: If we find this hurts performance, we can consider caching the fd and keeping it open.
+ int fd = open("/proc/self/as", O_RDONLY);
if (fd == -1) {
- LOG_ERROR("Unable to open /proc/self (errno: %d)", errno);
+ LOG_ERROR("Unable to open /proc/self/as (errno: %d)", errno);
+ CRASH();
+ }
+ int rc = devctl(fd, DCMD_PROC_TIDSTATUS, &regs, sizeof(regs), 0);
+ if (rc != EOK) {
+ LOG_ERROR("devctl(DCMD_PROC_TIDSTATUS) failed (error: %d)", rc);
CRASH();
}
- devctl(fd, DCMD_PROC_TIDSTATUS, &regs, sizeof(regs), 0);
close(fd);
+ return sizeof(struct _debug_thread_info);
#elif USE(PTHREADS)
pthread_attr_init(&regs);
#if HAVE(PTHREAD_NP_H) || OS(NETBSD)
diff --git a/Source/JavaScriptCore/heap/MarkedAllocator.cpp b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
index 01f00c376..ac0cf570a 100644
--- a/Source/JavaScriptCore/heap/MarkedAllocator.cpp
+++ b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
@@ -86,17 +86,11 @@ void* MarkedAllocator::allocateSlowCase()
ASSERT(result);
return result;
}
-
+
MarkedBlock* MarkedAllocator::allocateBlock()
{
- MarkedBlock* block = static_cast<MarkedBlock*>(m_heap->blockAllocator().allocate());
- if (block)
- block = MarkedBlock::recycle(block, m_heap, m_cellSize, m_cellsNeedDestruction);
- else
- block = MarkedBlock::create(m_heap, m_cellSize, m_cellsNeedDestruction);
-
+ MarkedBlock* block = MarkedBlock::create(m_heap->blockAllocator().allocate(), m_heap, m_cellSize, m_cellsNeedDestruction);
m_markedSpace->didAddBlock(block);
-
return block;
}
diff --git a/Source/JavaScriptCore/heap/MarkedBlock.cpp b/Source/JavaScriptCore/heap/MarkedBlock.cpp
index 3a58b5a42..42dc10371 100644
--- a/Source/JavaScriptCore/heap/MarkedBlock.cpp
+++ b/Source/JavaScriptCore/heap/MarkedBlock.cpp
@@ -32,31 +32,27 @@
namespace JSC {
-MarkedBlock* MarkedBlock::create(Heap* heap, size_t cellSize, bool cellsNeedDestruction)
+MarkedBlock* MarkedBlock::create(const PageAllocationAligned& allocation, Heap* heap, size_t cellSize, bool cellsNeedDestruction)
{
- PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, blockSize, OSAllocator::JSGCHeapPages);
- if (!static_cast<bool>(allocation))
- CRASH();
return new (NotNull, allocation.base()) MarkedBlock(allocation, heap, cellSize, cellsNeedDestruction);
}
-MarkedBlock* MarkedBlock::recycle(MarkedBlock* block, Heap* heap, size_t cellSize, bool cellsNeedDestruction)
+PageAllocationAligned MarkedBlock::destroy(MarkedBlock* block)
{
- return new (NotNull, block) MarkedBlock(block->m_allocation, heap, cellSize, cellsNeedDestruction);
-}
+ PageAllocationAligned allocation;
+ swap(allocation, block->m_allocation);
-void MarkedBlock::destroy(MarkedBlock* block)
-{
- block->m_allocation.deallocate();
+ block->~MarkedBlock();
+ return allocation;
}
-MarkedBlock::MarkedBlock(PageAllocationAligned& allocation, Heap* heap, size_t cellSize, bool cellsNeedDestruction)
+MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, Heap* heap, size_t cellSize, bool cellsNeedDestruction)
: HeapBlock(allocation)
, m_atomsPerCell((cellSize + atomSize - 1) / atomSize)
, m_endAtom(atomsPerBlock - m_atomsPerCell + 1)
, m_cellsNeedDestruction(cellsNeedDestruction)
, m_state(New) // All cells start out unmarked.
- , m_heap(heap)
+ , m_weakSet(heap)
{
ASSERT(heap);
HEAP_LOG_BLOCK_STATE_TRANSITION(this);
diff --git a/Source/JavaScriptCore/heap/MarkedBlock.h b/Source/JavaScriptCore/heap/MarkedBlock.h
index aa99ebf48..c21e20b19 100644
--- a/Source/JavaScriptCore/heap/MarkedBlock.h
+++ b/Source/JavaScriptCore/heap/MarkedBlock.h
@@ -25,6 +25,7 @@
#include "CardSet.h"
#include "HeapBlock.h"
+#include "WeakSet.h"
#include <wtf/Bitmap.h>
#include <wtf/DataLog.h>
#include <wtf/DoublyLinkedList.h>
@@ -112,21 +113,28 @@ namespace JSC {
ReturnType m_count;
};
- static MarkedBlock* create(Heap*, size_t cellSize, bool cellsNeedDestruction);
- static MarkedBlock* recycle(MarkedBlock*, Heap*, size_t cellSize, bool cellsNeedDestruction);
- static void destroy(MarkedBlock*);
+ static MarkedBlock* create(const PageAllocationAligned&, Heap*, size_t cellSize, bool cellsNeedDestruction);
+ static PageAllocationAligned destroy(MarkedBlock*);
static bool isAtomAligned(const void*);
static MarkedBlock* blockFor(const void*);
static size_t firstAtom();
+ void lastChanceToFinalize();
+
Heap* heap() const;
+ WeakSet& weakSet();
- void* allocate();
-
enum SweepMode { SweepOnly, SweepToFreeList };
FreeList sweep(SweepMode = SweepOnly);
+ void shrink();
+ void resetAllocator();
+
+ void visitWeakSet(HeapRootVisitor&);
+ void reapWeakSet();
+ void sweepWeakSet();
+
// While allocating from a free list, MarkedBlock temporarily has bogus
// cell liveness data. To restore accurate cell liveness data, call one
// of these functions:
@@ -135,7 +143,7 @@ namespace JSC {
void clearMarks();
size_t markCount();
- bool markCountIsZero(); // Faster than markCount().
+ bool isEmpty();
size_t cellSize();
bool cellsNeedDestruction();
@@ -187,7 +195,7 @@ namespace JSC {
typedef char Atom[atomSize];
- MarkedBlock(PageAllocationAligned&, Heap*, size_t cellSize, bool cellsNeedDestruction);
+ MarkedBlock(const PageAllocationAligned&, Heap*, size_t cellSize, bool cellsNeedDestruction);
Atom* atoms();
size_t atomNumber(const void*);
void callDestructor(JSCell*);
@@ -206,7 +214,7 @@ namespace JSC {
#endif
bool m_cellsNeedDestruction;
BlockState m_state;
- Heap* m_heap;
+ WeakSet m_weakSet;
};
inline MarkedBlock::FreeList::FreeList()
@@ -241,9 +249,47 @@ namespace JSC {
return reinterpret_cast<MarkedBlock*>(reinterpret_cast<Bits>(p) & blockMask);
}
+ inline void MarkedBlock::lastChanceToFinalize()
+ {
+ m_weakSet.lastChanceToFinalize();
+
+ clearMarks();
+ sweep();
+ }
+
inline Heap* MarkedBlock::heap() const
{
- return m_heap;
+ return m_weakSet.heap();
+ }
+
+ inline WeakSet& MarkedBlock::weakSet()
+ {
+ return m_weakSet;
+ }
+
+ inline void MarkedBlock::shrink()
+ {
+ m_weakSet.shrink();
+ }
+
+ inline void MarkedBlock::resetAllocator()
+ {
+ m_weakSet.resetAllocator();
+ }
+
+ inline void MarkedBlock::visitWeakSet(HeapRootVisitor& heapRootVisitor)
+ {
+ m_weakSet.visit(heapRootVisitor);
+ }
+
+ inline void MarkedBlock::reapWeakSet()
+ {
+ m_weakSet.reap();
+ }
+
+ inline void MarkedBlock::sweepWeakSet()
+ {
+ m_weakSet.sweep();
}
inline void MarkedBlock::didConsumeFreeList()
@@ -271,9 +317,9 @@ namespace JSC {
return m_marks.count();
}
- inline bool MarkedBlock::markCountIsZero()
+ inline bool MarkedBlock::isEmpty()
{
- return m_marks.isEmpty();
+ return m_marks.isEmpty() && m_weakSet.isEmpty();
}
inline size_t MarkedBlock::cellSize()
@@ -410,7 +456,7 @@ void MarkedBlock::gatherDirtyCells(DirtyCellVector& dirtyCells)
// blocks twice during GC.
m_state = Marked;
- if (markCountIsZero())
+ if (isEmpty())
return;
size_t cellSize = this->cellSize();
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.cpp b/Source/JavaScriptCore/heap/MarkedSpace.cpp
index 405ed571a..1604d2d63 100644
--- a/Source/JavaScriptCore/heap/MarkedSpace.cpp
+++ b/Source/JavaScriptCore/heap/MarkedSpace.cpp
@@ -30,6 +30,57 @@ namespace JSC {
class Structure;
+class Take {
+public:
+ typedef MarkedBlock* ReturnType;
+
+ enum TakeMode { TakeIfEmpty, TakeAll };
+
+ Take(TakeMode, MarkedSpace*);
+ void operator()(MarkedBlock*);
+ ReturnType returnValue();
+
+private:
+ TakeMode m_takeMode;
+ MarkedSpace* m_markedSpace;
+ DoublyLinkedList<MarkedBlock> m_blocks;
+};
+
+inline Take::Take(TakeMode takeMode, MarkedSpace* newSpace)
+ : m_takeMode(takeMode)
+ , m_markedSpace(newSpace)
+{
+}
+
+inline void Take::operator()(MarkedBlock* block)
+{
+ if (m_takeMode == TakeIfEmpty && !block->isEmpty())
+ return;
+
+ m_markedSpace->allocatorFor(block).removeBlock(block);
+ m_blocks.append(block);
+}
+
+inline Take::ReturnType Take::returnValue()
+{
+ return m_blocks.head();
+}
+
+struct VisitWeakSet : MarkedBlock::VoidFunctor {
+ VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
+ void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
+private:
+ HeapRootVisitor& m_heapRootVisitor;
+};
+
+struct ReapWeakSet : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->reapWeakSet(); }
+};
+
+struct SweepWeakSet : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->sweepWeakSet(); }
+};
+
MarkedSpace::MarkedSpace(Heap* heap)
: m_heap(heap)
{
@@ -44,6 +95,27 @@ MarkedSpace::MarkedSpace(Heap* heap)
}
}
+MarkedSpace::~MarkedSpace()
+{
+ // We record a temporary list of empties to avoid modifying m_blocks while iterating it.
+ Take take(Take::TakeAll, this);
+ freeBlocks(forEachBlock(take));
+}
+
+struct LastChanceToFinalize : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->lastChanceToFinalize(); }
+};
+
+void MarkedSpace::lastChanceToFinalize()
+{
+ canonicalizeCellLivenessData();
+ forEachBlock<LastChanceToFinalize>();
+}
+
+struct ResetAllocator : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->resetAllocator(); }
+};
+
void MarkedSpace::resetAllocators()
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
@@ -55,6 +127,24 @@ void MarkedSpace::resetAllocators()
allocatorFor(cellSize).reset();
destructorAllocatorFor(cellSize).reset();
}
+
+ forEachBlock<ResetAllocator>();
+}
+
+void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
+{
+ VisitWeakSet visitWeakSet(heapRootVisitor);
+ forEachBlock(visitWeakSet);
+}
+
+void MarkedSpace::reapWeakSets()
+{
+ forEachBlock<ReapWeakSet>();
+}
+
+void MarkedSpace::sweepWeakSets()
+{
+ forEachBlock<SweepWeakSet>();
}
void MarkedSpace::canonicalizeCellLivenessData()
@@ -94,47 +184,21 @@ void MarkedSpace::freeBlocks(MarkedBlock* head)
m_blocks.remove(block);
block->sweep();
- m_heap->blockAllocator().deallocate(block);
+ m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
}
}
-class TakeIfUnmarked {
-public:
- typedef MarkedBlock* ReturnType;
-
- TakeIfUnmarked(MarkedSpace*);
- void operator()(MarkedBlock*);
- ReturnType returnValue();
-
-private:
- MarkedSpace* m_markedSpace;
- DoublyLinkedList<MarkedBlock> m_empties;
+struct Shrink : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->shrink(); }
};
-inline TakeIfUnmarked::TakeIfUnmarked(MarkedSpace* newSpace)
- : m_markedSpace(newSpace)
-{
-}
-
-inline void TakeIfUnmarked::operator()(MarkedBlock* block)
-{
- if (!block->markCountIsZero())
- return;
-
- m_markedSpace->allocatorFor(block).removeBlock(block);
- m_empties.append(block);
-}
-
-inline TakeIfUnmarked::ReturnType TakeIfUnmarked::returnValue()
-{
- return m_empties.head();
-}
-
void MarkedSpace::shrink()
{
// We record a temporary list of empties to avoid modifying m_blocks while iterating it.
- TakeIfUnmarked takeIfUnmarked(this);
- freeBlocks(forEachBlock(takeIfUnmarked));
+ Take takeIfEmpty(Take::TakeIfEmpty, this);
+ freeBlocks(forEachBlock(takeIfEmpty));
+
+ forEachBlock<Shrink>();
}
#if ENABLE(GGC)
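
The new Take functor generalizes the old TakeIfUnmarked: during forEachBlock it only collects matching blocks onto a private list, and the caller frees that list afterwards, so the block set is never mutated mid-iteration (the pattern ~MarkedSpace and shrink() now share). A standalone sketch with standard containers as stand-ins, not part of the patch:

    #include <list>
    #include <vector>

    struct Block { bool empty = false; };

    enum class TakeMode { TakeIfEmpty, TakeAll };

    class Take {
    public:
        explicit Take(TakeMode mode) : m_mode(mode) { }

        void operator()(Block* block)
        {
            if (m_mode == TakeMode::TakeIfEmpty && !block->empty)
                return;
            m_taken.push_back(block);            // defer removal until the iteration has finished
        }

        const std::vector<Block*>& taken() const { return m_taken; }

    private:
        TakeMode m_mode;
        std::vector<Block*> m_taken;
    };

    int main()
    {
        std::list<Block*> blocks = { new Block{ true }, new Block{ false }, new Block{ true } };

        Take take(TakeMode::TakeIfEmpty);
        for (Block* block : blocks)              // stands in for forEachBlock(take)
            take(block);

        for (Block* block : take.taken()) {      // safe to mutate the container now
            blocks.remove(block);
            delete block;
        }

        for (Block* block : blocks)              // clean up what remains
            delete block;
        return 0;
    }
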
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.h b/Source/JavaScriptCore/heap/MarkedSpace.h
index 7bd5ca509..18b57c6d0 100644
--- a/Source/JavaScriptCore/heap/MarkedSpace.h
+++ b/Source/JavaScriptCore/heap/MarkedSpace.h
@@ -71,6 +71,8 @@ public:
static const size_t maxCellSize = 2048;
MarkedSpace(Heap*);
+ ~MarkedSpace();
+ void lastChanceToFinalize();
MarkedAllocator& firstAllocator();
MarkedAllocator& allocatorFor(size_t);
@@ -81,6 +83,10 @@ public:
void resetAllocators();
+ void visitWeakSets(HeapRootVisitor&);
+ void reapWeakSets();
+ void sweepWeakSets();
+
MarkedBlockSet& blocks() { return m_blocks; }
void canonicalizeCellLivenessData();
@@ -93,7 +99,6 @@ public:
template<typename Functor> typename Functor::ReturnType forEachBlock();
void shrink();
- void freeAllBlocks();
void freeBlocks(MarkedBlock* head);
void didAddBlock(MarkedBlock*);
diff --git a/Source/JavaScriptCore/heap/PassWeak.h b/Source/JavaScriptCore/heap/PassWeak.h
index 8c6364e4b..acd6e52c7 100644
--- a/Source/JavaScriptCore/heap/PassWeak.h
+++ b/Source/JavaScriptCore/heap/PassWeak.h
@@ -46,9 +46,7 @@ public:
T& operator*() const;
GetType get() const;
-#if !ASSERT_DISABLED
bool was(GetType) const;
-#endif
};
template<typename T> class PassWeak : public WeakImplAccessor<PassWeak<T>, T> {
@@ -102,12 +100,10 @@ template<typename Base, typename T> inline typename WeakImplAccessor<Base, T>::G
return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
}
-#if !ASSERT_DISABLED
template<typename Base, typename T> inline bool WeakImplAccessor<Base, T>::was(typename WeakImplAccessor<Base, T>::GetType other) const
{
return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell()) == other;
}
-#endif
template<typename T> inline PassWeak<T>::PassWeak()
: m_impl(0)
diff --git a/Source/JavaScriptCore/heap/Weak.h b/Source/JavaScriptCore/heap/Weak.h
index 0938249b8..e5e0a97ec 100644
--- a/Source/JavaScriptCore/heap/Weak.h
+++ b/Source/JavaScriptCore/heap/Weak.h
@@ -26,9 +26,10 @@
#ifndef Weak_h
#define Weak_h
-#include <wtf/Assertions.h>
#include "PassWeak.h"
#include "WeakSetInlines.h"
+#include <wtf/Assertions.h>
+#include <wtf/HashMap.h>
namespace JSC {
@@ -150,6 +151,32 @@ template<typename T> inline WeakImpl* Weak<T>::hashTableDeletedValue()
return reinterpret_cast<WeakImpl*>(-1);
}
+// This function helps avoid modifying a weak table while holding an iterator into it. (Object allocation
+// can run a finalizer that modifies the table. We avoid that by requiring a pre-constructed object as our value.)
+template<typename T, typename U> inline void weakAdd(HashMap<T, Weak<U> >& map, const T& key, PassWeak<U> value)
+{
+ ASSERT(!map.get(key));
+ map.set(key, value); // The table may still have a zombie for value.
+}
+
+template<typename T, typename U> inline void weakRemove(HashMap<T, Weak<U> >& map, const T& key, typename Weak<U>::GetType value)
+{
+ typename HashMap<T, Weak<U> >::iterator it = map.find(key);
+ ASSERT_UNUSED(value, value);
+ ASSERT(it != map.end());
+ ASSERT(it->second.was(value));
+ ASSERT(!it->second);
+ map.remove(it);
+}
+
+template<typename T> inline void weakClear(Weak<T>& weak, typename Weak<T>::GetType value)
+{
+ ASSERT_UNUSED(value, value);
+ ASSERT(weak.was(value));
+ ASSERT(!weak);
+ weak.clear();
+}
+
} // namespace JSC
namespace WTF {
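
The helpers added above exist so callers never allocate (and so never run finalizers that might mutate the table) while holding an iterator into a weak map: the value is fully constructed before the map is touched, and removal double-checks that the entry being erased is the dead entry the caller expects. A rough standalone analogue using std::weak_ptr in place of JSC::Weak, not part of the patch; names and types are illustrative:

    #include <cassert>
    #include <memory>
    #include <string>
    #include <unordered_map>

    template<typename Key, typename T>
    void weakAdd(std::unordered_map<Key, std::weak_ptr<T>>& map, const Key& key, std::weak_ptr<T> value)
    {
        assert(!map.count(key));              // no live entry may already exist for this key
        map[key] = std::move(value);          // value was constructed before we touched the map
    }

    template<typename Key, typename T>
    void weakRemove(std::unordered_map<Key, std::weak_ptr<T>>& map, const Key& key, const T* expected)
    {
        assert(expected != nullptr);          // plays the role of ASSERT_UNUSED(value, value)
        auto it = map.find(key);
        assert(it != map.end());
        assert(it->second.expired());         // only dead entries are removed this way
        map.erase(it);
    }

    int main()
    {
        std::unordered_map<std::string, std::weak_ptr<int>> table;

        auto value = std::make_shared<int>(42);
        weakAdd(table, std::string("answer"), std::weak_ptr<int>(value));

        int* raw = value.get();
        value.reset();                        // the referent dies; the table now holds a dead entry
        weakRemove(table, std::string("answer"), raw);
        return 0;
    }
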
diff --git a/Source/JavaScriptCore/heap/WeakBlock.cpp b/Source/JavaScriptCore/heap/WeakBlock.cpp
index f307e111e..685779d3a 100644
--- a/Source/JavaScriptCore/heap/WeakBlock.cpp
+++ b/Source/JavaScriptCore/heap/WeakBlock.cpp
@@ -36,19 +36,16 @@ namespace JSC {
WeakBlock* WeakBlock::create()
{
- PageAllocation allocation = PageAllocation::allocate(blockSize, OSAllocator::JSGCHeapPages);
- if (!static_cast<bool>(allocation))
- CRASH();
- return new (NotNull, allocation.base()) WeakBlock(allocation);
+ void* allocation = fastMalloc(blockSize);
+ return new (NotNull, allocation) WeakBlock;
}
void WeakBlock::destroy(WeakBlock* block)
{
- block->m_allocation.deallocate();
+ fastFree(block);
}
-WeakBlock::WeakBlock(PageAllocation& allocation)
- : m_allocation(allocation)
+WeakBlock::WeakBlock()
{
for (size_t i = 0; i < weakImplCount(); ++i) {
WeakImpl* weakImpl = &weakImpls()[i];
@@ -56,10 +53,10 @@ WeakBlock::WeakBlock(PageAllocation& allocation)
addToFreeList(&m_sweepResult.freeList, weakImpl);
}
- ASSERT(!m_sweepResult.isNull() && m_sweepResult.blockIsFree);
+ ASSERT(isEmpty());
}
-void WeakBlock::finalizeAll()
+void WeakBlock::lastChanceToFinalize()
{
for (size_t i = 0; i < weakImplCount(); ++i) {
WeakImpl* weakImpl = &weakImpls()[i];
@@ -90,10 +87,10 @@ void WeakBlock::sweep()
ASSERT(!m_sweepResult.isNull());
}
-void WeakBlock::visitLiveWeakImpls(HeapRootVisitor& heapRootVisitor)
+void WeakBlock::visit(HeapRootVisitor& heapRootVisitor)
{
// If a block is completely empty, a visit won't have any effect.
- if (!m_sweepResult.isNull() && m_sweepResult.blockIsFree)
+ if (isEmpty())
return;
SlotVisitor& visitor = heapRootVisitor.visitor();
@@ -118,10 +115,10 @@ void WeakBlock::visitLiveWeakImpls(HeapRootVisitor& heapRootVisitor)
}
}
-void WeakBlock::visitDeadWeakImpls(HeapRootVisitor&)
+void WeakBlock::reap()
{
- // If a block is completely empty, a visit won't have any effect.
- if (!m_sweepResult.isNull() && m_sweepResult.blockIsFree)
+ // If a block is completely empty, a reaping won't have any effect.
+ if (isEmpty())
return;
for (size_t i = 0; i < weakImplCount(); ++i) {
diff --git a/Source/JavaScriptCore/heap/WeakBlock.h b/Source/JavaScriptCore/heap/WeakBlock.h
index 9e546ea32..dc3e89d55 100644
--- a/Source/JavaScriptCore/heap/WeakBlock.h
+++ b/Source/JavaScriptCore/heap/WeakBlock.h
@@ -30,7 +30,6 @@
#include "WeakHandleOwner.h"
#include "WeakImpl.h"
#include <wtf/DoublyLinkedList.h>
-#include <wtf/PageAllocation.h>
#include <wtf/StdLibExtras.h>
namespace JSC {
@@ -42,7 +41,7 @@ class WeakHandleOwner;
class WeakBlock : public DoublyLinkedListNode<WeakBlock> {
public:
friend class WTF::DoublyLinkedListNode<WeakBlock>;
- static const size_t blockSize = 4 * KB;
+ static const size_t blockSize = 3 * KB; // 5% of MarkedBlock size
struct FreeCell {
FreeCell* next;
@@ -61,26 +60,26 @@ public:
static WeakImpl* asWeakImpl(FreeCell*);
+ bool isEmpty();
+
void sweep();
- const SweepResult& sweepResult();
SweepResult takeSweepResult();
- void visitLiveWeakImpls(HeapRootVisitor&);
- void visitDeadWeakImpls(HeapRootVisitor&);
+ void visit(HeapRootVisitor&);
+ void reap();
- void finalizeAll();
+ void lastChanceToFinalize();
private:
static FreeCell* asFreeCell(WeakImpl*);
- WeakBlock(PageAllocation&);
+ WeakBlock();
WeakImpl* firstWeakImpl();
void finalize(WeakImpl*);
WeakImpl* weakImpls();
size_t weakImplCount();
void addToFreeList(FreeCell**, WeakImpl*);
- PageAllocation m_allocation;
WeakBlock* m_prev;
WeakBlock* m_next;
SweepResult m_sweepResult;
@@ -111,11 +110,6 @@ inline WeakBlock::SweepResult WeakBlock::takeSweepResult()
return tmp;
}
-inline const WeakBlock::SweepResult& WeakBlock::sweepResult()
-{
- return m_sweepResult;
-}
-
inline WeakBlock::FreeCell* WeakBlock::asFreeCell(WeakImpl* weakImpl)
{
return reinterpret_cast<FreeCell*>(weakImpl);
@@ -151,6 +145,11 @@ inline void WeakBlock::addToFreeList(FreeCell** freeList, WeakImpl* weakImpl)
*freeList = freeCell;
}
+inline bool WeakBlock::isEmpty()
+{
+ return !m_sweepResult.isNull() && m_sweepResult.blockIsFree;
+}
+
} // namespace JSC
#endif // WeakBlock_h
diff --git a/Source/JavaScriptCore/heap/WeakSet.cpp b/Source/JavaScriptCore/heap/WeakSet.cpp
index d9c773cef..9374fd8ff 100644
--- a/Source/JavaScriptCore/heap/WeakSet.cpp
+++ b/Source/JavaScriptCore/heap/WeakSet.cpp
@@ -40,24 +40,6 @@ WeakSet::~WeakSet()
m_blocks.clear();
}
-void WeakSet::finalizeAll()
-{
- for (WeakBlock* block = m_blocks.head(); block; block = block->next())
- block->finalizeAll();
-}
-
-void WeakSet::visitLiveWeakImpls(HeapRootVisitor& visitor)
-{
- for (WeakBlock* block = m_blocks.head(); block; block = block->next())
- block->visitLiveWeakImpls(visitor);
-}
-
-void WeakSet::visitDeadWeakImpls(HeapRootVisitor& visitor)
-{
- for (WeakBlock* block = m_blocks.head(); block; block = block->next())
- block->visitDeadWeakImpls(visitor);
-}
-
void WeakSet::sweep()
{
WeakBlock* next;
@@ -65,7 +47,7 @@ void WeakSet::sweep()
next = block->next();
// If a block is completely empty, a new sweep won't have any effect.
- if (!block->sweepResult().isNull() && block->sweepResult().blockIsFree)
+ if (block->isEmpty())
continue;
block->takeSweepResult(); // Force a new sweep by discarding the last sweep.
@@ -73,23 +55,6 @@ void WeakSet::sweep()
}
}
-void WeakSet::shrink()
-{
- WeakBlock* next;
- for (WeakBlock* block = m_blocks.head(); block; block = next) {
- next = block->next();
-
- if (!block->sweepResult().isNull() && block->sweepResult().blockIsFree)
- removeAllocator(block);
- }
-}
-
-void WeakSet::resetAllocator()
-{
- m_allocator = 0;
- m_nextAllocator = m_blocks.head();
-}
-
WeakBlock::FreeCell* WeakSet::findAllocator()
{
if (WeakBlock::FreeCell* allocator = tryFindAllocator())
diff --git a/Source/JavaScriptCore/heap/WeakSet.h b/Source/JavaScriptCore/heap/WeakSet.h
index 0a683bd5f..be9844a64 100644
--- a/Source/JavaScriptCore/heap/WeakSet.h
+++ b/Source/JavaScriptCore/heap/WeakSet.h
@@ -35,20 +35,22 @@ class WeakImpl;
class WeakSet {
public:
+ static WeakImpl* allocate(JSValue, WeakHandleOwner* = 0, void* context = 0);
+ static void deallocate(WeakImpl*);
+
WeakSet(Heap*);
- void finalizeAll();
~WeakSet();
+ void lastChanceToFinalize();
- static WeakImpl* allocate(JSValue, WeakHandleOwner* = 0, void* context = 0);
- static void deallocate(WeakImpl*);
+ Heap* heap() const;
- void visitLiveWeakImpls(HeapRootVisitor&);
- void visitDeadWeakImpls(HeapRootVisitor&);
+ bool isEmpty() const;
+ void visit(HeapRootVisitor&);
+ void reap();
void sweep();
- void resetAllocator();
-
void shrink();
+ void resetAllocator();
private:
JS_EXPORT_PRIVATE WeakBlock::FreeCell* findAllocator();
@@ -69,11 +71,61 @@ inline WeakSet::WeakSet(Heap* heap)
{
}
+inline Heap* WeakSet::heap() const
+{
+ return m_heap;
+}
+
+inline bool WeakSet::isEmpty() const
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next()) {
+ if (!block->isEmpty())
+ return false;
+ }
+
+ return true;
+}
+
inline void WeakSet::deallocate(WeakImpl* weakImpl)
{
weakImpl->setState(WeakImpl::Deallocated);
}
+inline void WeakSet::lastChanceToFinalize()
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next())
+ block->lastChanceToFinalize();
+}
+
+inline void WeakSet::visit(HeapRootVisitor& visitor)
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next())
+ block->visit(visitor);
+}
+
+inline void WeakSet::reap()
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next())
+ block->reap();
+}
+
+inline void WeakSet::shrink()
+{
+ WeakBlock* next;
+ for (WeakBlock* block = m_blocks.head(); block; block = next) {
+ next = block->next();
+
+ if (block->isEmpty())
+ removeAllocator(block);
+ }
+}
+
+inline void WeakSet::resetAllocator()
+{
+ m_allocator = 0;
+ m_nextAllocator = m_blocks.head();
+}
+
} // namespace JSC
#endif // WeakSet_h
diff --git a/Source/JavaScriptCore/heap/WeakSetInlines.h b/Source/JavaScriptCore/heap/WeakSetInlines.h
index 0515904fc..6e2420c45 100644
--- a/Source/JavaScriptCore/heap/WeakSetInlines.h
+++ b/Source/JavaScriptCore/heap/WeakSetInlines.h
@@ -32,7 +32,7 @@ namespace JSC {
inline WeakImpl* WeakSet::allocate(JSValue jsValue, WeakHandleOwner* weakHandleOwner, void* context)
{
- WeakSet& weakSet = *Heap::heap(jsValue.asCell())->weakSet();
+ WeakSet& weakSet = MarkedBlock::blockFor(jsValue.asCell())->weakSet();
WeakBlock::FreeCell* allocator = weakSet.m_allocator;
if (UNLIKELY(!allocator))
allocator = weakSet.findAllocator();