Diffstat (limited to 'Source/JavaScriptCore/heap')
-rw-r--r--  Source/JavaScriptCore/heap/BlockAllocator.cpp  | 138
-rw-r--r--  Source/JavaScriptCore/heap/BlockAllocator.h  |  87
-rw-r--r--  Source/JavaScriptCore/heap/CopiedAllocator.h  |  12
-rw-r--r--  Source/JavaScriptCore/heap/CopiedBlock.h  |  22
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpace.cpp  | 129
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpace.h  |  14
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h  |  22
-rw-r--r--  Source/JavaScriptCore/heap/Handle.h  |   8
-rw-r--r--  Source/JavaScriptCore/heap/HandleHeap.h  | 309
-rw-r--r--  Source/JavaScriptCore/heap/HandleSet.cpp (renamed from Source/JavaScriptCore/heap/HandleHeap.cpp)  | 110
-rw-r--r--  Source/JavaScriptCore/heap/HandleSet.h  | 208
-rw-r--r--  Source/JavaScriptCore/heap/Heap.cpp  | 277
-rw-r--r--  Source/JavaScriptCore/heap/Heap.h  | 112
-rw-r--r--  Source/JavaScriptCore/heap/Local.h  |   6
-rw-r--r--  Source/JavaScriptCore/heap/MachineStackMarker.cpp  |   2
-rw-r--r--  Source/JavaScriptCore/heap/MarkStack.cpp  |  12
-rw-r--r--  Source/JavaScriptCore/heap/MarkedAllocator.cpp  |  70
-rw-r--r--  Source/JavaScriptCore/heap/MarkedAllocator.h  |  23
-rw-r--r--  Source/JavaScriptCore/heap/MarkedBlock.cpp  |  27
-rw-r--r--  Source/JavaScriptCore/heap/MarkedBlock.h  |  28
-rw-r--r--  Source/JavaScriptCore/heap/MarkedSpace.cpp  |  26
-rw-r--r--  Source/JavaScriptCore/heap/MarkedSpace.h  |  24
-rw-r--r--  Source/JavaScriptCore/heap/PassWeak.h  | 148
-rw-r--r--  Source/JavaScriptCore/heap/SlotVisitor.h  |   1
-rw-r--r--  Source/JavaScriptCore/heap/Strong.h  |  22
-rw-r--r--  Source/JavaScriptCore/heap/StrongInlines.h  |   8
-rw-r--r--  Source/JavaScriptCore/heap/Weak.h  | 176
-rw-r--r--  Source/JavaScriptCore/heap/WeakBlock.cpp  | 139
-rw-r--r--  Source/JavaScriptCore/heap/WeakBlock.h  | 156
-rw-r--r--  Source/JavaScriptCore/heap/WeakHandleOwner.cpp  |  47
-rw-r--r--  Source/JavaScriptCore/heap/WeakHandleOwner.h  |  44
-rw-r--r--  Source/JavaScriptCore/heap/WeakImpl.h  | 115
-rw-r--r--  Source/JavaScriptCore/heap/WeakSet.cpp  | 132
-rw-r--r--  Source/JavaScriptCore/heap/WeakSet.h  |  79
-rw-r--r--  Source/JavaScriptCore/heap/WeakSetInlines.h  |  47
35 files changed, 1842 insertions, 938 deletions
diff --git a/Source/JavaScriptCore/heap/BlockAllocator.cpp b/Source/JavaScriptCore/heap/BlockAllocator.cpp
new file mode 100644
index 000000000..028c84c2d
--- /dev/null
+++ b/Source/JavaScriptCore/heap/BlockAllocator.cpp
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "BlockAllocator.h"
+
+#include "MarkedBlock.h"
+#include <wtf/CurrentTime.h>
+
+namespace JSC {
+
+BlockAllocator::BlockAllocator()
+ : m_numberOfFreeBlocks(0)
+ , m_blockFreeingThreadShouldQuit(false)
+ , m_blockFreeingThread(createThread(blockFreeingThreadStartFunc, this, "JavaScriptCore::BlockFree"))
+{
+ ASSERT(m_blockFreeingThread);
+}
+
+BlockAllocator::~BlockAllocator()
+{
+ releaseFreeBlocks();
+ {
+ MutexLocker locker(m_freeBlockLock);
+ m_blockFreeingThreadShouldQuit = true;
+ m_freeBlockCondition.broadcast();
+ }
+ waitForThreadCompletion(m_blockFreeingThread);
+}
+
+void BlockAllocator::releaseFreeBlocks()
+{
+ while (true) {
+ MarkedBlock* block;
+ {
+ MutexLocker locker(m_freeBlockLock);
+ if (!m_numberOfFreeBlocks)
+ block = 0;
+ else {
+ // FIXME: How do we know this is a MarkedBlock? It could be a CopiedBlock.
+ block = static_cast<MarkedBlock*>(m_freeBlocks.removeHead());
+ ASSERT(block);
+ m_numberOfFreeBlocks--;
+ }
+ }
+
+ if (!block)
+ break;
+
+ MarkedBlock::destroy(block);
+ }
+}
+
+void BlockAllocator::waitForRelativeTimeWhileHoldingLock(double relative)
+{
+ if (m_blockFreeingThreadShouldQuit)
+ return;
+ m_freeBlockCondition.timedWait(m_freeBlockLock, currentTime() + relative);
+}
+
+void BlockAllocator::waitForRelativeTime(double relative)
+{
+ // If this returns early, that's fine, so long as it doesn't do it too
+ // frequently. It would only be a bug if this function failed to return
+ // when it was asked to do so.
+
+ MutexLocker locker(m_freeBlockLock);
+ waitForRelativeTimeWhileHoldingLock(relative);
+}
+
+void BlockAllocator::blockFreeingThreadStartFunc(void* blockAllocator)
+{
+ static_cast<BlockAllocator*>(blockAllocator)->blockFreeingThreadMain();
+}
+
+void BlockAllocator::blockFreeingThreadMain()
+{
+ while (!m_blockFreeingThreadShouldQuit) {
+ // Generally wait for one second before scavenging free blocks. This
+ // may return early, particularly when we're being asked to quit.
+ waitForRelativeTime(1.0);
+ if (m_blockFreeingThreadShouldQuit)
+ break;
+
+ // Now process the list of free blocks. Keep freeing until half of the
+ // blocks that are currently on the list are gone. Assume that a size_t
+ // field can be accessed atomically.
+ size_t currentNumberOfFreeBlocks = m_numberOfFreeBlocks;
+ if (!currentNumberOfFreeBlocks)
+ continue;
+
+ size_t desiredNumberOfFreeBlocks = currentNumberOfFreeBlocks / 2;
+
+ while (!m_blockFreeingThreadShouldQuit) {
+ MarkedBlock* block;
+ {
+ MutexLocker locker(m_freeBlockLock);
+ if (m_numberOfFreeBlocks <= desiredNumberOfFreeBlocks)
+ block = 0;
+ else {
+ // FIXME: How do we know this is a MarkedBlock? It could be a CopiedBlock.
+ block = static_cast<MarkedBlock*>(m_freeBlocks.removeHead());
+ ASSERT(block);
+ m_numberOfFreeBlocks--;
+ }
+ }
+
+ if (!block)
+ break;
+
+ MarkedBlock::destroy(block);
+ }
+ }
+}
+
+} // namespace JSC
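
Note: blockFreeingThreadMain() above wakes roughly once per second and frees cached blocks until only half of the count seen at the start of the pass remains. A standalone sketch of that halving policy, using hypothetical std types rather than the patch's DoublyLinkedList/MutexLocker:

    #include <cstddef>
    #include <deque>
    #include <mutex>

    struct Block { /* stands in for MarkedBlock */ };

    // Free blocks until the cache shrinks to half of its size at the start of
    // this pass. The count is re-checked under the lock on every iteration, and
    // the block is destroyed outside the lock, mirroring the loop above.
    void scavengeOnce(std::deque<Block*>& freeBlocks, std::mutex& lock)
    {
        std::size_t target;
        {
            std::lock_guard<std::mutex> guard(lock);
            target = freeBlocks.size() / 2;
        }
        while (true) {
            Block* block = nullptr;
            {
                std::lock_guard<std::mutex> guard(lock);
                if (freeBlocks.size() <= target)
                    break;
                block = freeBlocks.front();
                freeBlocks.pop_front();
            }
            delete block; // stands in for MarkedBlock::destroy(block)
        }
    }
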
diff --git a/Source/JavaScriptCore/heap/BlockAllocator.h b/Source/JavaScriptCore/heap/BlockAllocator.h
new file mode 100644
index 000000000..4b90d28b9
--- /dev/null
+++ b/Source/JavaScriptCore/heap/BlockAllocator.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef BlockAllocator_h
+#define BlockAllocator_h
+
+#include <wtf/DoublyLinkedList.h>
+#include <wtf/Forward.h>
+#include <wtf/Threading.h>
+
+namespace JSC {
+
+class HeapBlock;
+
+// Simple allocator to reduce VM cost by holding onto blocks of memory for
+// short periods of time and then freeing them on a secondary thread.
+
+class BlockAllocator {
+public:
+ BlockAllocator();
+ ~BlockAllocator();
+
+ HeapBlock* allocate();
+ void deallocate(HeapBlock*);
+
+private:
+ void waitForRelativeTimeWhileHoldingLock(double relative);
+ void waitForRelativeTime(double relative);
+
+ void blockFreeingThreadMain();
+ static void blockFreeingThreadStartFunc(void* heap);
+
+ void releaseFreeBlocks();
+
+ DoublyLinkedList<HeapBlock> m_freeBlocks;
+ size_t m_numberOfFreeBlocks;
+ bool m_blockFreeingThreadShouldQuit;
+ Mutex m_freeBlockLock;
+ ThreadCondition m_freeBlockCondition;
+ ThreadIdentifier m_blockFreeingThread;
+};
+
+inline HeapBlock* BlockAllocator::allocate()
+{
+ MutexLocker locker(m_freeBlockLock);
+ if (!m_numberOfFreeBlocks) {
+ ASSERT(m_freeBlocks.isEmpty());
+ return 0;
+ }
+
+ ASSERT(!m_freeBlocks.isEmpty());
+ m_numberOfFreeBlocks--;
+ return m_freeBlocks.removeHead();
+}
+
+inline void BlockAllocator::deallocate(HeapBlock* block)
+{
+ MutexLocker locker(m_freeBlockLock);
+ m_freeBlocks.push(block);
+ m_numberOfFreeBlocks++;
+}
+
+} // namespace JSC
+
+#endif // BlockAllocator_h
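
Note: allocate() above returns 0 when the cache is empty rather than mapping fresh memory itself, so callers are expected to fall back to the OS. A hypothetical caller pattern (not part of the patch; allocateFreshBlockFromOS is an assumed helper):

    HeapBlock* acquireBlock(BlockAllocator& allocator)
    {
        if (HeapBlock* recycled = allocator.allocate())
            return recycled;                 // reuse a cached block, no new mapping
        return allocateFreshBlockFromOS();   // assumed helper: maps a new block
    }

    void releaseBlock(BlockAllocator& allocator, HeapBlock* block)
    {
        allocator.deallocate(block);         // cached; the scavenger thread frees it later
    }
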
diff --git a/Source/JavaScriptCore/heap/CopiedAllocator.h b/Source/JavaScriptCore/heap/CopiedAllocator.h
index dc3c5dfed..7455ec816 100644
--- a/Source/JavaScriptCore/heap/CopiedAllocator.h
+++ b/Source/JavaScriptCore/heap/CopiedAllocator.h
@@ -39,8 +39,7 @@ public:
bool wasLastAllocation(void*, size_t);
void startedCopying();
void resetCurrentBlock(CopiedBlock*);
- void resetLastAllocation(void*);
- size_t currentUtilization();
+ size_t currentCapacity();
private:
CopiedBlock* currentBlock() { return m_currentBlock; }
@@ -92,14 +91,9 @@ inline void CopiedAllocator::resetCurrentBlock(CopiedBlock* newBlock)
m_currentOffset = static_cast<char*>(newBlock->m_offset);
}
-inline size_t CopiedAllocator::currentUtilization()
+inline size_t CopiedAllocator::currentCapacity()
{
- return static_cast<size_t>(m_currentOffset - m_currentBlock->payload());
-}
-
-inline void CopiedAllocator::resetLastAllocation(void* ptr)
-{
- m_currentOffset = static_cast<char*>(ptr);
+ return m_currentBlock->capacity();
}
} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/CopiedBlock.h b/Source/JavaScriptCore/heap/CopiedBlock.h
index 387b2ddab..431b86c38 100644
--- a/Source/JavaScriptCore/heap/CopiedBlock.h
+++ b/Source/JavaScriptCore/heap/CopiedBlock.h
@@ -55,16 +55,30 @@ public:
#endif
}
- char* payload()
- {
- return reinterpret_cast<char*>(this) + ((sizeof(CopiedBlock) + 7) & ~7);
- }
+ char* payload();
+ size_t size();
+ size_t capacity();
private:
void* m_offset;
uintptr_t m_isPinned;
};
+inline char* CopiedBlock::payload()
+{
+ return reinterpret_cast<char*>(this) + ((sizeof(CopiedBlock) + 7) & ~7);
+}
+
+inline size_t CopiedBlock::size()
+{
+ return static_cast<size_t>(static_cast<char*>(m_offset) - payload());
+}
+
+inline size_t CopiedBlock::capacity()
+{
+ return m_allocation.size();
+}
+
} // namespace JSC
#endif
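
Note: a worked example of the three accessors above, with illustrative numbers (not taken from the patch). Suppose sizeof(CopiedBlock) == 44 and the block's allocation is 64 KB:

    // payload()  = blockBase + ((44 + 7) & ~7) = blockBase + 48   (8-byte aligned)
    // capacity() = m_allocation.size()         = 65536
    // size()     = m_offset - payload()        (bytes bump-allocated so far)
    //
    // A freshly constructed block therefore has size() == 0, and size() grows
    // toward capacity() minus the 48-byte header as data is copied in.
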
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.cpp b/Source/JavaScriptCore/heap/CopiedSpace.cpp
index c8470120f..063ea65a2 100644
--- a/Source/JavaScriptCore/heap/CopiedSpace.cpp
+++ b/Source/JavaScriptCore/heap/CopiedSpace.cpp
@@ -27,6 +27,7 @@
#include "CopiedSpace.h"
#include "CopiedSpaceInlineMethods.h"
+#include "GCActivityCallback.h"
namespace JSC {
@@ -34,8 +35,6 @@ CopiedSpace::CopiedSpace(Heap* heap)
: m_heap(heap)
, m_toSpace(0)
, m_fromSpace(0)
- , m_totalMemoryAllocated(0)
- , m_totalMemoryUtilized(0)
, m_inCopyingPhase(false)
, m_numberOfLoanedBlocks(0)
{
@@ -46,8 +45,6 @@ void CopiedSpace::init()
m_toSpace = &m_blocks1;
m_fromSpace = &m_blocks2;
- m_totalMemoryAllocated += HeapBlock::s_blockSize * s_initialBlockNum;
-
if (!addNewBlock())
CRASH();
}
@@ -57,7 +54,8 @@ CheckedBoolean CopiedSpace::tryAllocateSlowCase(size_t bytes, void** outPtr)
if (isOversize(bytes))
return tryAllocateOversize(bytes, outPtr);
- m_totalMemoryUtilized += m_allocator.currentUtilization();
+ m_heap->didAllocate(m_allocator.currentCapacity());
+
if (!addNewBlock()) {
*outPtr = 0;
return false;
@@ -71,22 +69,22 @@ CheckedBoolean CopiedSpace::tryAllocateOversize(size_t bytes, void** outPtr)
{
ASSERT(isOversize(bytes));
- size_t blockSize = WTF::roundUpToMultipleOf<s_pageSize>(sizeof(CopiedBlock) + bytes);
- PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, s_pageSize, OSAllocator::JSGCHeapPages);
+ size_t blockSize = WTF::roundUpToMultipleOf(WTF::pageSize(), sizeof(CopiedBlock) + bytes);
+
+ PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, WTF::pageSize(), OSAllocator::JSGCHeapPages);
if (!static_cast<bool>(allocation)) {
*outPtr = 0;
return false;
}
+
CopiedBlock* block = new (NotNull, allocation.base()) CopiedBlock(allocation);
m_oversizeBlocks.push(block);
- ASSERT(is8ByteAligned(block->m_offset));
-
m_oversizeFilter.add(reinterpret_cast<Bits>(block));
- m_totalMemoryAllocated += blockSize;
- m_totalMemoryUtilized += bytes;
+ *outPtr = allocateFromBlock(block, bytes);
+
+ m_heap->didAllocate(blockSize);
- *outPtr = block->m_offset;
return true;
}
@@ -102,13 +100,12 @@ CheckedBoolean CopiedSpace::tryReallocate(void** ptr, size_t oldSize, size_t new
return tryReallocateOversize(ptr, oldSize, newSize);
if (m_allocator.wasLastAllocation(oldPtr, oldSize)) {
- m_allocator.resetLastAllocation(oldPtr);
- if (m_allocator.fitsInCurrentBlock(newSize)) {
- m_totalMemoryUtilized += newSize - oldSize;
- return m_allocator.allocate(newSize);
+ size_t delta = newSize - oldSize;
+ if (m_allocator.fitsInCurrentBlock(delta)) {
+ (void)m_allocator.allocate(delta);
+ return true;
}
}
- m_totalMemoryUtilized -= oldSize;
void* result = 0;
if (!tryAllocate(newSize, &result)) {
@@ -132,17 +129,15 @@ CheckedBoolean CopiedSpace::tryReallocateOversize(void** ptr, size_t oldSize, si
*ptr = 0;
return false;
}
+
memcpy(newPtr, oldPtr, oldSize);
if (isOversize(oldSize)) {
CopiedBlock* oldBlock = oversizeBlockFor(oldPtr);
m_oversizeBlocks.remove(oldBlock);
oldBlock->m_allocation.deallocate();
- m_totalMemoryAllocated -= oldSize + sizeof(CopiedBlock);
}
- m_totalMemoryUtilized -= oldSize;
-
*ptr = newPtr;
return true;
}
@@ -166,11 +161,6 @@ void CopiedSpace::doneFillingBlock(CopiedBlock* block)
}
{
- MutexLocker locker(m_memoryStatsLock);
- m_totalMemoryUtilized += static_cast<size_t>(static_cast<char*>(block->m_offset) - block->payload());
- }
-
- {
MutexLocker locker(m_loanedBlocksLock);
ASSERT(m_numberOfLoanedBlocks > 0);
m_numberOfLoanedBlocks--;
@@ -198,11 +188,7 @@ void CopiedSpace::doneCopying()
}
m_toSpaceSet.remove(block);
- {
- MutexLocker locker(m_heap->m_freeBlockLock);
- m_heap->m_freeBlocks.push(block);
- m_heap->m_numberOfFreeBlocks++;
- }
+ m_heap->blockAllocator().deallocate(block);
}
CopiedBlock* curr = static_cast<CopiedBlock*>(m_oversizeBlocks.head());
@@ -210,8 +196,6 @@ void CopiedSpace::doneCopying()
CopiedBlock* next = static_cast<CopiedBlock*>(curr->next());
if (!curr->m_isPinned) {
m_oversizeBlocks.remove(curr);
- m_totalMemoryAllocated -= curr->m_allocation.size();
- m_totalMemoryUtilized -= curr->m_allocation.size() - sizeof(CopiedBlock);
curr->m_allocation.deallocate();
} else
curr->m_isPinned = false;
@@ -227,16 +211,8 @@ void CopiedSpace::doneCopying()
CheckedBoolean CopiedSpace::getFreshBlock(AllocationEffort allocationEffort, CopiedBlock** outBlock)
{
- HeapBlock* heapBlock = 0;
CopiedBlock* block = 0;
- {
- MutexLocker locker(m_heap->m_freeBlockLock);
- if (!m_heap->m_freeBlocks.isEmpty()) {
- heapBlock = m_heap->m_freeBlocks.removeHead();
- m_heap->m_numberOfFreeBlocks--;
- }
- }
- if (heapBlock)
+ if (HeapBlock* heapBlock = m_heap->blockAllocator().allocate())
block = new (NotNull, heapBlock) CopiedBlock(heapBlock->m_allocation);
else if (allocationEffort == AllocationMustSucceed) {
if (!allocateNewBlock(&block)) {
@@ -246,7 +222,7 @@ CheckedBoolean CopiedSpace::getFreshBlock(AllocationEffort allocationEffort, Cop
}
} else {
ASSERT(allocationEffort == AllocationCanFail);
- if (m_heap->waterMark() >= m_heap->highWaterMark() && m_heap->m_isSafeToCollect)
+ if (m_heap->shouldCollect())
m_heap->collect(Heap::DoNotSweep);
if (!getFreshBlock(AllocationMustSucceed, &block)) {
@@ -261,4 +237,73 @@ CheckedBoolean CopiedSpace::getFreshBlock(AllocationEffort allocationEffort, Cop
return true;
}
+void CopiedSpace::freeAllBlocks()
+{
+ while (!m_toSpace->isEmpty())
+ m_heap->blockAllocator().deallocate(m_toSpace->removeHead());
+
+ while (!m_fromSpace->isEmpty())
+ m_heap->blockAllocator().deallocate(m_fromSpace->removeHead());
+
+ while (!m_oversizeBlocks.isEmpty())
+ m_oversizeBlocks.removeHead()->m_allocation.deallocate();
+}
+
+size_t CopiedSpace::size()
+{
+ size_t calculatedSize = 0;
+
+ for (CopiedBlock* block = static_cast<CopiedBlock*>(m_toSpace->head()); block; block = static_cast<CopiedBlock*>(block->next()))
+ calculatedSize += block->size();
+
+ for (CopiedBlock* block = static_cast<CopiedBlock*>(m_fromSpace->head()); block; block = static_cast<CopiedBlock*>(block->next()))
+ calculatedSize += block->size();
+
+ for (CopiedBlock* block = static_cast<CopiedBlock*>(m_oversizeBlocks.head()); block; block = static_cast<CopiedBlock*>(block->next()))
+ calculatedSize += block->size();
+
+ return calculatedSize;
+}
+
+size_t CopiedSpace::capacity()
+{
+ size_t calculatedCapacity = 0;
+
+ for (CopiedBlock* block = static_cast<CopiedBlock*>(m_toSpace->head()); block; block = static_cast<CopiedBlock*>(block->next()))
+ calculatedCapacity += block->capacity();
+
+ for (CopiedBlock* block = static_cast<CopiedBlock*>(m_fromSpace->head()); block; block = static_cast<CopiedBlock*>(block->next()))
+ calculatedCapacity += block->capacity();
+
+ for (CopiedBlock* block = static_cast<CopiedBlock*>(m_oversizeBlocks.head()); block; block = static_cast<CopiedBlock*>(block->next()))
+ calculatedCapacity += block->capacity();
+
+ return calculatedCapacity;
+}
+
+static bool isBlockListPagedOut(double deadline, DoublyLinkedList<HeapBlock>* list)
+{
+ unsigned itersSinceLastTimeCheck = 0;
+ HeapBlock* current = list->head();
+ while (current) {
+ current = current->next();
+ ++itersSinceLastTimeCheck;
+ if (itersSinceLastTimeCheck >= Heap::s_timeCheckResolution) {
+ double currentTime = WTF::monotonicallyIncreasingTime();
+ if (currentTime > deadline)
+ return true;
+ itersSinceLastTimeCheck = 0;
+ }
+ }
+
+ return false;
+}
+
+bool CopiedSpace::isPagedOut(double deadline)
+{
+ return isBlockListPagedOut(deadline, m_toSpace)
+ || isBlockListPagedOut(deadline, m_fromSpace)
+ || isBlockListPagedOut(deadline, &m_oversizeBlocks);
+}
+
} // namespace JSC
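
Note: the tryReallocate() change earlier in this file grows the most recent allocation in place by allocating only the size delta, because in a bump allocator the last allocation ends exactly at the bump pointer. A simplified sketch of that idea (assumed standalone types, not the patch's CopiedAllocator):

    #include <cstddef>

    struct BumpAllocator {
        char* payload; // start of the current block's payload
        char* offset;  // bump pointer: next free byte
        char* end;     // payload + capacity

        bool wasLastAllocation(void* ptr, std::size_t size) const
        {
            // The most recent allocation ends exactly at the bump pointer.
            return static_cast<char*>(ptr) + size == offset;
        }

        bool tryExtendLast(void* ptr, std::size_t oldSize, std::size_t newSize)
        {
            if (!wasLastAllocation(ptr, oldSize))
                return false;
            std::size_t delta = newSize - oldSize;
            if (offset + delta > end)
                return false;  // does not fit; caller falls back to allocate-and-copy
            offset += delta;   // grow the last allocation in place
            return true;
        }
    };
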
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.h b/Source/JavaScriptCore/heap/CopiedSpace.h
index e8fa80055..d3cc040a5 100644
--- a/Source/JavaScriptCore/heap/CopiedSpace.h
+++ b/Source/JavaScriptCore/heap/CopiedSpace.h
@@ -35,6 +35,7 @@
#include <wtf/HashSet.h>
#include <wtf/OSAllocator.h>
#include <wtf/PageAllocationAligned.h>
+#include <wtf/PageBlock.h>
#include <wtf/StdLibExtras.h>
#include <wtf/ThreadingPrimitives.h>
@@ -65,8 +66,11 @@ public:
bool contains(void*, CopiedBlock*&);
- size_t totalMemoryAllocated() { return m_totalMemoryAllocated; }
- size_t totalMemoryUtilized() { return m_totalMemoryUtilized; }
+ size_t size();
+ size_t capacity();
+
+ void freeAllBlocks();
+ bool isPagedOut(double deadline);
static CopiedBlock* blockFor(void*);
@@ -97,7 +101,6 @@ private:
HashSet<CopiedBlock*> m_toSpaceSet;
Mutex m_toSpaceLock;
- Mutex m_memoryStatsLock;
DoublyLinkedList<HeapBlock>* m_toSpace;
DoublyLinkedList<HeapBlock>* m_fromSpace;
@@ -106,9 +109,6 @@ private:
DoublyLinkedList<HeapBlock> m_blocks2;
DoublyLinkedList<HeapBlock> m_oversizeBlocks;
- size_t m_totalMemoryAllocated;
- size_t m_totalMemoryUtilized;
-
bool m_inCopyingPhase;
Mutex m_loanedBlocksLock;
@@ -116,8 +116,6 @@ private:
size_t m_numberOfLoanedBlocks;
static const size_t s_maxAllocationSize = 32 * KB;
- static const size_t s_pageSize = 4 * KB;
- static const size_t s_pageMask = ~(s_pageSize - 1);
static const size_t s_initialBlockNum = 16;
static const size_t s_blockMask = ~(HeapBlock::s_blockSize - 1);
};
diff --git a/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h b/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h
index 0b1b38d32..a8e45658b 100644
--- a/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h
+++ b/Source/JavaScriptCore/heap/CopiedSpaceInlineMethods.h
@@ -56,8 +56,6 @@ inline void CopiedSpace::startedCopying()
m_toSpaceFilter.reset();
m_allocator.startedCopying();
- m_totalMemoryUtilized = 0;
-
ASSERT(!m_inCopyingPhase);
ASSERT(!m_numberOfLoanedBlocks);
m_inCopyingPhase = true;
@@ -65,11 +63,7 @@ inline void CopiedSpace::startedCopying()
inline void CopiedSpace::recycleBlock(CopiedBlock* block)
{
- {
- MutexLocker locker(m_heap->m_freeBlockLock);
- m_heap->m_freeBlocks.push(block);
- m_heap->m_numberOfFreeBlocks++;
- }
+ m_heap->blockAllocator().deallocate(block);
{
MutexLocker locker(m_loanedBlocksLock);
@@ -118,18 +112,13 @@ inline CheckedBoolean CopiedSpace::allocateNewBlock(CopiedBlock** outBlock)
return false;
}
- {
- MutexLocker locker(m_memoryStatsLock);
- m_totalMemoryAllocated += HeapBlock::s_blockSize;
- }
-
*outBlock = new (NotNull, allocation.base()) CopiedBlock(allocation);
return true;
}
inline bool CopiedSpace::fitsInBlock(CopiedBlock* block, size_t bytes)
{
- return static_cast<char*>(block->m_offset) + bytes < reinterpret_cast<char*>(block) + HeapBlock::s_blockSize && static_cast<char*>(block->m_offset) + bytes > block->m_offset;
+ return static_cast<char*>(block->m_offset) + bytes < reinterpret_cast<char*>(block) + block->capacity() && static_cast<char*>(block->m_offset) + bytes > block->m_offset;
}
inline CheckedBoolean CopiedSpace::tryAllocate(size_t bytes, void** outPtr)
@@ -146,14 +135,13 @@ inline CheckedBoolean CopiedSpace::tryAllocate(size_t bytes, void** outPtr)
inline void* CopiedSpace::allocateFromBlock(CopiedBlock* block, size_t bytes)
{
- ASSERT(!isOversize(bytes));
ASSERT(fitsInBlock(block, bytes));
ASSERT(is8ByteAligned(block->m_offset));
void* ptr = block->m_offset;
- ASSERT(block->m_offset >= block->payload() && block->m_offset < reinterpret_cast<char*>(block) + HeapBlock::s_blockSize);
+ ASSERT(block->m_offset >= block->payload() && block->m_offset < reinterpret_cast<char*>(block) + block->capacity());
block->m_offset = static_cast<void*>((static_cast<char*>(ptr) + bytes));
- ASSERT(block->m_offset >= block->payload() && block->m_offset < reinterpret_cast<char*>(block) + HeapBlock::s_blockSize);
+ ASSERT(block->m_offset >= block->payload() && block->m_offset < reinterpret_cast<char*>(block) + block->capacity());
ASSERT(is8ByteAligned(ptr));
return ptr;
@@ -171,7 +159,7 @@ inline bool CopiedSpace::isPinned(void* ptr)
inline CopiedBlock* CopiedSpace::oversizeBlockFor(void* ptr)
{
- return reinterpret_cast<CopiedBlock*>(reinterpret_cast<size_t>(ptr) & s_pageMask);
+ return reinterpret_cast<CopiedBlock*>(reinterpret_cast<size_t>(ptr) & WTF::pageMask());
}
inline CopiedBlock* CopiedSpace::blockFor(void* ptr)
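
Note: oversizeBlockFor() above recovers a CopiedBlock header by rounding a pointer down to a page boundary. This works because oversize allocations are page-aligned and the pointer handed out is the payload start, which sits inside the block's first page. A minimal sketch of the mask arithmetic (illustrative only; assumes pageSize is a power of two):

    #include <cstddef>
    #include <cstdint>

    inline std::uintptr_t pageMaskFor(std::size_t pageSize) // e.g. 4096
    {
        return ~(static_cast<std::uintptr_t>(pageSize) - 1);
    }

    inline void* blockBaseFor(void* payloadPtr, std::size_t pageSize)
    {
        // Clearing the low bits returns the page-aligned block header address.
        return reinterpret_cast<void*>(
            reinterpret_cast<std::uintptr_t>(payloadPtr) & pageMaskFor(pageSize));
    }
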
diff --git a/Source/JavaScriptCore/heap/Handle.h b/Source/JavaScriptCore/heap/Handle.h
index 6f467743c..8bf2bd896 100644
--- a/Source/JavaScriptCore/heap/Handle.h
+++ b/Source/JavaScriptCore/heap/Handle.h
@@ -48,7 +48,7 @@ template<typename KeyType, typename MappedType, typename FinalizerCallback, type
class HandleBase {
template <typename T> friend class Weak;
- friend class HandleHeap;
+ friend class HandleSet;
friend struct JSCallbackObjectData;
template <typename KeyType, typename MappedType, typename FinalizerCallback, typename HashArg, typename KeyTraitsArg> friend class WeakGCMap;
@@ -59,6 +59,8 @@ public:
typedef JSValue (HandleBase::*UnspecifiedBoolType);
operator UnspecifiedBoolType*() const { return (m_slot && *m_slot) ? reinterpret_cast<UnspecifiedBoolType*>(1) : 0; }
+ HandleSlot slot() const { return m_slot; }
+
protected:
HandleBase(HandleSlot slot)
: m_slot(slot)
@@ -67,7 +69,6 @@ protected:
void swap(HandleBase& other) { std::swap(m_slot, other.m_slot); }
- HandleSlot slot() const { return m_slot; }
void setSlot(HandleSlot slot)
{
m_slot = slot;
@@ -132,7 +133,8 @@ protected:
}
private:
- friend class HandleHeap;
+ friend class HandleSet;
+ friend class WeakBlock;
static Handle<T> wrapSlot(HandleSlot slot)
{
diff --git a/Source/JavaScriptCore/heap/HandleHeap.h b/Source/JavaScriptCore/heap/HandleHeap.h
deleted file mode 100644
index c9ee11b2f..000000000
--- a/Source/JavaScriptCore/heap/HandleHeap.h
+++ /dev/null
@@ -1,309 +0,0 @@
-/*
- * Copyright (C) 2011 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
- * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
- * THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef HandleHeap_h
-#define HandleHeap_h
-
-#include <wtf/BlockStack.h>
-#include "Handle.h"
-#include <wtf/HashCountedSet.h>
-#include <wtf/SentinelLinkedList.h>
-#include <wtf/SinglyLinkedList.h>
-
-namespace JSC {
-
-class HandleHeap;
-class HeapRootVisitor;
-class JSGlobalData;
-class JSValue;
-class SlotVisitor;
-
-class JS_EXPORT_PRIVATE WeakHandleOwner {
-public:
- virtual ~WeakHandleOwner();
- virtual bool isReachableFromOpaqueRoots(Handle<Unknown>, void* context, SlotVisitor&);
- virtual void finalize(Handle<Unknown>, void* context);
-};
-
-class HandleHeap {
-public:
- static HandleHeap* heapFor(HandleSlot);
-
- HandleHeap(JSGlobalData*);
-
- JSGlobalData* globalData();
-
- HandleSlot allocate();
- void deallocate(HandleSlot);
-
- void makeWeak(HandleSlot, WeakHandleOwner* = 0, void* context = 0);
- HandleSlot copyWeak(HandleSlot);
-
- void visitStrongHandles(HeapRootVisitor&);
- void visitWeakHandles(HeapRootVisitor&);
- void finalizeWeakHandles();
-
- JS_EXPORT_PRIVATE void writeBarrier(HandleSlot, const JSValue&);
-
-#if !ASSERT_DISABLED
- bool hasWeakOwner(HandleSlot, WeakHandleOwner*);
- bool hasFinalizer(HandleSlot);
-#endif
-
- unsigned protectedGlobalObjectCount();
-
- template<typename Functor> void forEachStrongHandle(Functor&, const HashCountedSet<JSCell*>& skipSet);
-
-private:
- class Node {
- public:
- Node(WTF::SentinelTag);
- Node(HandleHeap*);
-
- HandleSlot slot();
- HandleHeap* handleHeap();
-
- void makeWeak(WeakHandleOwner*, void* context);
- bool isWeak();
-
- WeakHandleOwner* weakOwner();
- void* weakOwnerContext();
-
- void setPrev(Node*);
- Node* prev();
-
- void setNext(Node*);
- Node* next();
-
- private:
- WeakHandleOwner* emptyWeakOwner();
-
- JSValue m_value;
- HandleHeap* m_handleHeap;
- WeakHandleOwner* m_weakOwner;
- void* m_weakOwnerContext;
- Node* m_prev;
- Node* m_next;
- };
-
- static HandleSlot toHandle(Node*);
- static Node* toNode(HandleSlot);
-
- JS_EXPORT_PRIVATE void grow();
-
-#if ENABLE(GC_VALIDATION) || !ASSERT_DISABLED
- bool isValidWeakNode(Node*);
- bool isLiveNode(Node*);
-#endif
-
- JSGlobalData* m_globalData;
- BlockStack<Node> m_blockStack;
-
- SentinelLinkedList<Node> m_strongList;
- SentinelLinkedList<Node> m_weakList;
- SentinelLinkedList<Node> m_immediateList;
- SinglyLinkedList<Node> m_freeList;
- Node* m_nextToFinalize;
-};
-
-inline HandleHeap* HandleHeap::heapFor(HandleSlot handle)
-{
- return toNode(handle)->handleHeap();
-}
-
-inline JSGlobalData* HandleHeap::globalData()
-{
- return m_globalData;
-}
-
-inline HandleSlot HandleHeap::toHandle(Node* node)
-{
- return reinterpret_cast<HandleSlot>(node);
-}
-
-inline HandleHeap::Node* HandleHeap::toNode(HandleSlot handle)
-{
- return reinterpret_cast<Node*>(handle);
-}
-
-inline HandleSlot HandleHeap::allocate()
-{
- // Forbid assignment to handles during the finalization phase, since it would violate many GC invariants.
- // File a bug with stack trace if you hit this.
- if (m_nextToFinalize)
- CRASH();
- if (m_freeList.isEmpty())
- grow();
-
- Node* node = m_freeList.pop();
- new (NotNull, node) Node(this);
- m_immediateList.push(node);
- return toHandle(node);
-}
-
-inline void HandleHeap::deallocate(HandleSlot handle)
-{
- Node* node = toNode(handle);
- if (node == m_nextToFinalize) {
- ASSERT(m_nextToFinalize->next());
- m_nextToFinalize = m_nextToFinalize->next();
- }
-
- SentinelLinkedList<Node>::remove(node);
- m_freeList.push(node);
-}
-
-inline HandleSlot HandleHeap::copyWeak(HandleSlot other)
-{
- Node* node = toNode(allocate());
- node->makeWeak(toNode(other)->weakOwner(), toNode(other)->weakOwnerContext());
- writeBarrier(node->slot(), *other);
- *node->slot() = *other;
- return toHandle(node);
-}
-
-inline void HandleHeap::makeWeak(HandleSlot handle, WeakHandleOwner* weakOwner, void* context)
-{
- // Forbid assignment to handles during the finalization phase, since it would violate many GC invariants.
- // File a bug with stack trace if you hit this.
- if (m_nextToFinalize)
- CRASH();
- Node* node = toNode(handle);
- node->makeWeak(weakOwner, context);
-
- SentinelLinkedList<Node>::remove(node);
- if (!*handle || !handle->isCell()) {
- m_immediateList.push(node);
- return;
- }
-
- m_weakList.push(node);
-}
-
-#if !ASSERT_DISABLED
-inline bool HandleHeap::hasWeakOwner(HandleSlot handle, WeakHandleOwner* weakOwner)
-{
- return toNode(handle)->weakOwner() == weakOwner;
-}
-
-inline bool HandleHeap::hasFinalizer(HandleSlot handle)
-{
- return toNode(handle)->weakOwner();
-}
-#endif
-
-inline HandleHeap::Node::Node(HandleHeap* handleHeap)
- : m_handleHeap(handleHeap)
- , m_weakOwner(0)
- , m_weakOwnerContext(0)
- , m_prev(0)
- , m_next(0)
-{
-}
-
-inline HandleHeap::Node::Node(WTF::SentinelTag)
- : m_handleHeap(0)
- , m_weakOwner(0)
- , m_weakOwnerContext(0)
- , m_prev(0)
- , m_next(0)
-{
-}
-
-inline HandleSlot HandleHeap::Node::slot()
-{
- return &m_value;
-}
-
-inline HandleHeap* HandleHeap::Node::handleHeap()
-{
- return m_handleHeap;
-}
-
-inline void HandleHeap::Node::makeWeak(WeakHandleOwner* weakOwner, void* context)
-{
- m_weakOwner = weakOwner ? weakOwner : emptyWeakOwner();
- m_weakOwnerContext = context;
-}
-
-inline bool HandleHeap::Node::isWeak()
-{
- return m_weakOwner; // True for emptyWeakOwner().
-}
-
-inline WeakHandleOwner* HandleHeap::Node::weakOwner()
-{
- return m_weakOwner == emptyWeakOwner() ? 0 : m_weakOwner; // 0 for emptyWeakOwner().
-}
-
-inline void* HandleHeap::Node::weakOwnerContext()
-{
- ASSERT(weakOwner());
- return m_weakOwnerContext;
-}
-
-inline void HandleHeap::Node::setPrev(Node* prev)
-{
- m_prev = prev;
-}
-
-inline HandleHeap::Node* HandleHeap::Node::prev()
-{
- return m_prev;
-}
-
-inline void HandleHeap::Node::setNext(Node* next)
-{
- m_next = next;
-}
-
-inline HandleHeap::Node* HandleHeap::Node::next()
-{
- return m_next;
-}
-
-// Sentinel to indicate that a node is weak, but its owner has no meaningful
-// callbacks. This allows us to optimize by skipping such nodes.
-inline WeakHandleOwner* HandleHeap::Node::emptyWeakOwner()
-{
- return reinterpret_cast<WeakHandleOwner*>(-1);
-}
-
-template<typename Functor> void HandleHeap::forEachStrongHandle(Functor& functor, const HashCountedSet<JSCell*>& skipSet)
-{
- Node* end = m_strongList.end();
- for (Node* node = m_strongList.begin(); node != end; node = node->next()) {
- JSValue value = *node->slot();
- if (!value || !value.isCell())
- continue;
- if (skipSet.contains(value.asCell()))
- continue;
- functor(value.asCell());
- }
-}
-
-}
-
-#endif
diff --git a/Source/JavaScriptCore/heap/HandleHeap.cpp b/Source/JavaScriptCore/heap/HandleSet.cpp
index 2402f7efb..a6ccf29eb 100644
--- a/Source/JavaScriptCore/heap/HandleHeap.cpp
+++ b/Source/JavaScriptCore/heap/HandleSet.cpp
@@ -24,34 +24,21 @@
*/
#include "config.h"
-#include "HandleHeap.h"
+#include "HandleSet.h"
#include "HeapRootVisitor.h"
#include "JSObject.h"
namespace JSC {
-WeakHandleOwner::~WeakHandleOwner()
-{
-}
-
-bool WeakHandleOwner::isReachableFromOpaqueRoots(Handle<Unknown>, void*, SlotVisitor&)
-{
- return false;
-}
-
-void WeakHandleOwner::finalize(Handle<Unknown>, void*)
-{
-}
-
-HandleHeap::HandleHeap(JSGlobalData* globalData)
+HandleSet::HandleSet(JSGlobalData* globalData)
: m_globalData(globalData)
, m_nextToFinalize(0)
{
grow();
}
-void HandleHeap::grow()
+void HandleSet::grow()
{
Node* block = m_blockStack.grow();
for (int i = m_blockStack.blockLength - 1; i >= 0; --i) {
@@ -61,7 +48,7 @@ void HandleHeap::grow()
}
}
-void HandleHeap::visitStrongHandles(HeapRootVisitor& heapRootVisitor)
+void HandleSet::visitStrongHandles(HeapRootVisitor& heapRootVisitor)
{
Node* end = m_strongList.end();
for (Node* node = m_strongList.begin(); node != end; node = node->next()) {
@@ -73,63 +60,7 @@ void HandleHeap::visitStrongHandles(HeapRootVisitor& heapRootVisitor)
}
}
-void HandleHeap::visitWeakHandles(HeapRootVisitor& heapRootVisitor)
-{
- SlotVisitor& visitor = heapRootVisitor.visitor();
-
- Node* end = m_weakList.end();
- for (Node* node = m_weakList.begin(); node != end; node = node->next()) {
-#if ENABLE(GC_VALIDATION)
- if (!isValidWeakNode(node))
- CRASH();
-#endif
- JSCell* cell = node->slot()->asCell();
- if (Heap::isMarked(cell))
- continue;
-
- WeakHandleOwner* weakOwner = node->weakOwner();
- if (!weakOwner)
- continue;
-
- if (!weakOwner->isReachableFromOpaqueRoots(Handle<Unknown>::wrapSlot(node->slot()), node->weakOwnerContext(), visitor))
- continue;
-
- heapRootVisitor.visit(node->slot());
- }
-}
-
-void HandleHeap::finalizeWeakHandles()
-{
- Node* end = m_weakList.end();
- for (Node* node = m_weakList.begin(); node != end; node = m_nextToFinalize) {
- m_nextToFinalize = node->next();
-#if ENABLE(GC_VALIDATION)
- if (!isValidWeakNode(node))
- CRASH();
-#endif
-
- JSCell* cell = node->slot()->asCell();
- if (Heap::isMarked(cell))
- continue;
-
- if (WeakHandleOwner* weakOwner = node->weakOwner()) {
- weakOwner->finalize(Handle<Unknown>::wrapSlot(node->slot()), node->weakOwnerContext());
- if (m_nextToFinalize != node->next()) // Owner deallocated node.
- continue;
- }
-#if ENABLE(GC_VALIDATION)
- if (!isLiveNode(node))
- CRASH();
-#endif
- *node->slot() = JSValue();
- SentinelLinkedList<Node>::remove(node);
- m_immediateList.push(node);
- }
-
- m_nextToFinalize = 0;
-}
-
-void HandleHeap::writeBarrier(HandleSlot slot, const JSValue& value)
+void HandleSet::writeBarrier(HandleSlot slot, const JSValue& value)
{
// Forbid assignment to handles during the finalization phase, since it would violate many GC invariants.
// File a bug with stack trace if you hit this.
@@ -150,15 +81,6 @@ void HandleHeap::writeBarrier(HandleSlot slot, const JSValue& value)
return;
}
- if (node->isWeak()) {
- m_weakList.push(node);
-#if ENABLE(GC_VALIDATION)
- if (!isLiveNode(node))
- CRASH();
-#endif
- return;
- }
-
m_strongList.push(node);
#if ENABLE(GC_VALIDATION)
if (!isLiveNode(node))
@@ -166,7 +88,7 @@ void HandleHeap::writeBarrier(HandleSlot slot, const JSValue& value)
#endif
}
-unsigned HandleHeap::protectedGlobalObjectCount()
+unsigned HandleSet::protectedGlobalObjectCount()
{
unsigned count = 0;
Node* end = m_strongList.end();
@@ -179,7 +101,7 @@ unsigned HandleHeap::protectedGlobalObjectCount()
}
#if ENABLE(GC_VALIDATION) || !ASSERT_DISABLED
-bool HandleHeap::isLiveNode(Node* node)
+bool HandleSet::isLiveNode(Node* node)
{
if (node->prev()->next() != node)
return false;
@@ -188,24 +110,6 @@ bool HandleHeap::isLiveNode(Node* node)
return true;
}
-
-bool HandleHeap::isValidWeakNode(Node* node)
-{
- if (!isLiveNode(node))
- return false;
- if (!node->isWeak())
- return false;
-
- JSValue value = *node->slot();
- if (!value || !value.isCell())
- return false;
-
- JSCell* cell = value.asCell();
- if (!cell || !cell->structure())
- return false;
-
- return true;
-}
#endif
} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/HandleSet.h b/Source/JavaScriptCore/heap/HandleSet.h
new file mode 100644
index 000000000..c22ffa418
--- /dev/null
+++ b/Source/JavaScriptCore/heap/HandleSet.h
@@ -0,0 +1,208 @@
+/*
+ * Copyright (C) 2011 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef HandleSet_h
+#define HandleSet_h
+
+#include <wtf/BlockStack.h>
+#include "Handle.h"
+#include <wtf/HashCountedSet.h>
+#include <wtf/SentinelLinkedList.h>
+#include <wtf/SinglyLinkedList.h>
+
+namespace JSC {
+
+class HandleSet;
+class HeapRootVisitor;
+class JSGlobalData;
+class JSValue;
+class SlotVisitor;
+
+class HandleSet {
+public:
+ static HandleSet* heapFor(HandleSlot);
+
+ HandleSet(JSGlobalData*);
+
+ JSGlobalData* globalData();
+
+ HandleSlot allocate();
+ void deallocate(HandleSlot);
+
+ void visitStrongHandles(HeapRootVisitor&);
+
+ JS_EXPORT_PRIVATE void writeBarrier(HandleSlot, const JSValue&);
+
+ unsigned protectedGlobalObjectCount();
+
+ template<typename Functor> void forEachStrongHandle(Functor&, const HashCountedSet<JSCell*>& skipSet);
+
+private:
+ class Node {
+ public:
+ Node(WTF::SentinelTag);
+ Node(HandleSet*);
+
+ HandleSlot slot();
+ HandleSet* handleSet();
+
+ void setPrev(Node*);
+ Node* prev();
+
+ void setNext(Node*);
+ Node* next();
+
+ private:
+ JSValue m_value;
+ HandleSet* m_handleSet;
+ Node* m_prev;
+ Node* m_next;
+ };
+
+ static HandleSlot toHandle(Node*);
+ static Node* toNode(HandleSlot);
+
+ JS_EXPORT_PRIVATE void grow();
+
+#if ENABLE(GC_VALIDATION) || !ASSERT_DISABLED
+ bool isLiveNode(Node*);
+#endif
+
+ JSGlobalData* m_globalData;
+ BlockStack<Node> m_blockStack;
+
+ SentinelLinkedList<Node> m_strongList;
+ SentinelLinkedList<Node> m_immediateList;
+ SinglyLinkedList<Node> m_freeList;
+ Node* m_nextToFinalize;
+};
+
+inline HandleSet* HandleSet::heapFor(HandleSlot handle)
+{
+ return toNode(handle)->handleSet();
+}
+
+inline JSGlobalData* HandleSet::globalData()
+{
+ return m_globalData;
+}
+
+inline HandleSlot HandleSet::toHandle(Node* node)
+{
+ return reinterpret_cast<HandleSlot>(node);
+}
+
+inline HandleSet::Node* HandleSet::toNode(HandleSlot handle)
+{
+ return reinterpret_cast<Node*>(handle);
+}
+
+inline HandleSlot HandleSet::allocate()
+{
+ // Forbid assignment to handles during the finalization phase, since it would violate many GC invariants.
+ // File a bug with stack trace if you hit this.
+ if (m_nextToFinalize)
+ CRASH();
+ if (m_freeList.isEmpty())
+ grow();
+
+ Node* node = m_freeList.pop();
+ new (NotNull, node) Node(this);
+ m_immediateList.push(node);
+ return toHandle(node);
+}
+
+inline void HandleSet::deallocate(HandleSlot handle)
+{
+ Node* node = toNode(handle);
+ if (node == m_nextToFinalize) {
+ ASSERT(m_nextToFinalize->next());
+ m_nextToFinalize = m_nextToFinalize->next();
+ }
+
+ SentinelLinkedList<Node>::remove(node);
+ m_freeList.push(node);
+}
+
+inline HandleSet::Node::Node(HandleSet* handleSet)
+ : m_handleSet(handleSet)
+ , m_prev(0)
+ , m_next(0)
+{
+}
+
+inline HandleSet::Node::Node(WTF::SentinelTag)
+ : m_handleSet(0)
+ , m_prev(0)
+ , m_next(0)
+{
+}
+
+inline HandleSlot HandleSet::Node::slot()
+{
+ return &m_value;
+}
+
+inline HandleSet* HandleSet::Node::handleSet()
+{
+ return m_handleSet;
+}
+
+inline void HandleSet::Node::setPrev(Node* prev)
+{
+ m_prev = prev;
+}
+
+inline HandleSet::Node* HandleSet::Node::prev()
+{
+ return m_prev;
+}
+
+inline void HandleSet::Node::setNext(Node* next)
+{
+ m_next = next;
+}
+
+inline HandleSet::Node* HandleSet::Node::next()
+{
+ return m_next;
+}
+
+template<typename Functor> void HandleSet::forEachStrongHandle(Functor& functor, const HashCountedSet<JSCell*>& skipSet)
+{
+ Node* end = m_strongList.end();
+ for (Node* node = m_strongList.begin(); node != end; node = node->next()) {
+ JSValue value = *node->slot();
+ if (!value || !value.isCell())
+ continue;
+ if (skipSet.contains(value.asCell()))
+ continue;
+ functor(value.asCell());
+ }
+}
+
+}
+
+#endif
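
Note: a hedged sketch of the allocate/writeBarrier/deallocate cycle a strong-handle wrapper might drive against the HandleSet API above (an illustration, not the patch's Strong<T> implementation; handleSet is assumed to be a HandleSet* and value a JSValue):

    HandleSlot slot = handleSet->allocate();   // node popped off the free list
    handleSet->writeBarrier(slot, value);      // moves the node onto m_strongList
    *slot = value;                             // the slot now keeps `value` alive
    // ... when the owner releases the handle ...
    handleSet->deallocate(slot);               // node returns to the free list
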
diff --git a/Source/JavaScriptCore/heap/Heap.cpp b/Source/JavaScriptCore/heap/Heap.cpp
index a5d4a063f..d0dbc3172 100644
--- a/Source/JavaScriptCore/heap/Heap.cpp
+++ b/Source/JavaScriptCore/heap/Heap.cpp
@@ -33,6 +33,7 @@
#include "JSLock.h"
#include "JSONObject.h"
#include "Tracing.h"
+#include "WeakSetInlines.h"
#include <algorithm>
#include <wtf/CurrentTime.h>
@@ -312,144 +313,65 @@ inline PassOwnPtr<TypeCountSet> RecordType::returnValue()
Heap::Heap(JSGlobalData* globalData, HeapSize heapSize)
: m_heapSize(heapSize)
, m_minBytesPerCycle(heapSizeForHint(heapSize))
- , m_lastFullGCSize(0)
- , m_waterMark(0)
- , m_highWaterMark(m_minBytesPerCycle)
+ , m_sizeAfterLastCollect(0)
+ , m_bytesAllocatedLimit(m_minBytesPerCycle)
+ , m_bytesAllocated(0)
+ , m_bytesAbandoned(0)
, m_operationInProgress(NoOperation)
, m_objectSpace(this)
, m_storageSpace(this)
- , m_blockFreeingThreadShouldQuit(false)
- , m_extraCost(0)
, m_markListSet(0)
, m_activityCallback(DefaultGCActivityCallback::create(this))
, m_machineThreads(this)
, m_sharedData(globalData)
, m_slotVisitor(m_sharedData)
- , m_handleHeap(globalData)
+ , m_weakSet(this)
+ , m_handleSet(globalData)
, m_isSafeToCollect(false)
, m_globalData(globalData)
, m_lastGCLength(0)
+ , m_lastCodeDiscardTime(WTF::currentTime())
{
- (*m_activityCallback)();
- m_numberOfFreeBlocks = 0;
- m_blockFreeingThread = createThread(blockFreeingThreadStartFunc, this, "JavaScriptCore::BlockFree");
-
- ASSERT(m_blockFreeingThread);
m_storageSpace.init();
}
Heap::~Heap()
{
- // Destroy our block freeing thread.
- {
- MutexLocker locker(m_freeBlockLock);
- m_blockFreeingThreadShouldQuit = true;
- m_freeBlockCondition.broadcast();
- }
- waitForThreadCompletion(m_blockFreeingThread);
+ delete m_markListSet;
+
+ m_objectSpace.shrink();
+ m_storageSpace.freeAllBlocks();
- // The destroy function must already have been called, so assert this.
- ASSERT(!m_globalData);
+ ASSERT(!size());
+ ASSERT(!capacity());
}
-void Heap::destroy()
+bool Heap::isPagedOut(double deadline)
{
- JSLock lock(SilenceAssertionsOnly);
-
- if (!m_globalData)
- return;
+ return m_objectSpace.isPagedOut(deadline) || m_storageSpace.isPagedOut(deadline);
+}
+// The JSGlobalData is being destroyed and the collector will never run again.
+// Run all pending finalizers now because we won't get another chance.
+void Heap::lastChanceToFinalize()
+{
ASSERT(!m_globalData->dynamicGlobalObject);
ASSERT(m_operationInProgress == NoOperation);
-
- // The global object is not GC protected at this point, so sweeping may delete it
- // (and thus the global data) before other objects that may use the global data.
- RefPtr<JSGlobalData> protect(m_globalData);
-#if ENABLE(JIT)
- m_globalData->jitStubs->clearHostFunctionStubs();
-#endif
-
- delete m_markListSet;
- m_markListSet = 0;
+ // FIXME: Make this a release-mode crash once we're sure no one's doing this.
+ if (size_t size = m_protectedValues.size())
+ WTFLogAlways("ERROR: JavaScriptCore heap deallocated while %ld values were still protected", static_cast<unsigned long>(size));
+ m_weakSet.finalizeAll();
canonicalizeCellLivenessData();
clearMarks();
-
- m_handleHeap.finalizeWeakHandles();
+ sweep();
m_globalData->smallStrings.finalizeSmallStrings();
- shrink();
- ASSERT(!size());
-
+
#if ENABLE(SIMPLE_HEAP_PROFILING)
m_slotVisitor.m_visitedTypeCounts.dump(WTF::dataFile(), "Visited Type Counts");
m_destroyedTypeCounts.dump(WTF::dataFile(), "Destroyed Type Counts");
#endif
-
- releaseFreeBlocks();
-
- m_globalData = 0;
-}
-
-void Heap::waitForRelativeTimeWhileHoldingLock(double relative)
-{
- if (m_blockFreeingThreadShouldQuit)
- return;
- m_freeBlockCondition.timedWait(m_freeBlockLock, currentTime() + relative);
-}
-
-void Heap::waitForRelativeTime(double relative)
-{
- // If this returns early, that's fine, so long as it doesn't do it too
- // frequently. It would only be a bug if this function failed to return
- // when it was asked to do so.
-
- MutexLocker locker(m_freeBlockLock);
- waitForRelativeTimeWhileHoldingLock(relative);
-}
-
-void Heap::blockFreeingThreadStartFunc(void* heap)
-{
- static_cast<Heap*>(heap)->blockFreeingThreadMain();
-}
-
-void Heap::blockFreeingThreadMain()
-{
- while (!m_blockFreeingThreadShouldQuit) {
- // Generally wait for one second before scavenging free blocks. This
- // may return early, particularly when we're being asked to quit.
- waitForRelativeTime(1.0);
- if (m_blockFreeingThreadShouldQuit)
- break;
-
- // Now process the list of free blocks. Keep freeing until half of the
- // blocks that are currently on the list are gone. Assume that a size_t
- // field can be accessed atomically.
- size_t currentNumberOfFreeBlocks = m_numberOfFreeBlocks;
- if (!currentNumberOfFreeBlocks)
- continue;
-
- size_t desiredNumberOfFreeBlocks = currentNumberOfFreeBlocks / 2;
-
- while (!m_blockFreeingThreadShouldQuit) {
- MarkedBlock* block;
- {
- MutexLocker locker(m_freeBlockLock);
- if (m_numberOfFreeBlocks <= desiredNumberOfFreeBlocks)
- block = 0;
- else {
- block = static_cast<MarkedBlock*>(m_freeBlocks.removeHead());
- ASSERT(block);
- m_numberOfFreeBlocks--;
- }
- }
-
- if (!block)
- break;
-
- MarkedBlock::destroy(block);
- }
- }
}
void Heap::reportExtraMemoryCostSlowCase(size_t cost)
@@ -465,9 +387,28 @@ void Heap::reportExtraMemoryCostSlowCase(size_t cost)
// if a large value survives one garbage collection, there is not much point to
// collecting more frequently as long as it stays alive.
- if (m_extraCost > maxExtraCost && m_extraCost > highWaterMark() / 2)
- collectAllGarbage();
- m_extraCost += cost;
+ didAllocate(cost);
+ if (shouldCollect())
+ collect(DoNotSweep);
+}
+
+void Heap::reportAbandonedObjectGraph()
+{
+ // Our clients don't know exactly how much memory they
+ // are abandoning so we just guess for them.
+ double abandonedBytes = 0.10 * m_sizeAfterLastCollect;
+
+ // We want to accelerate the next collection. Because memory has just
+ // been abandoned, the next collection has the potential to
+ // be more profitable. Since allocation is the trigger for collection,
+ // we hasten the next collection by pretending that we've allocated more memory.
+ didAbandon(abandonedBytes);
+}
+
+void Heap::didAbandon(size_t bytes)
+{
+ m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
+ m_bytesAbandoned += bytes;
}
void Heap::protect(JSValue k)
@@ -662,7 +603,7 @@ void Heap::markRoots(bool fullGC)
{
GCPHASE(VisitStrongHandles);
- m_handleHeap.visitStrongHandles(heapRootVisitor);
+ m_handleSet.visitStrongHandles(heapRootVisitor);
visitor.donateAndDrain();
}
@@ -686,12 +627,12 @@ void Heap::markRoots(bool fullGC)
#endif
}
- // Weak handles must be marked last, because their owners use the set of
- // opaque roots to determine reachability.
+ // Weak references must be marked last because their liveness depends on
+ // the liveness of the rest of the object graph.
{
- GCPHASE(VisitingWeakHandles);
+ GCPHASE(VisitingLiveWeakHandles);
while (true) {
- m_handleHeap.visitWeakHandles(heapRootVisitor);
+ m_weakSet.visitLiveWeakImpls(heapRootVisitor);
harvestWeakReferences();
if (visitor.isEmpty())
break;
@@ -704,6 +645,12 @@ void Heap::markRoots(bool fullGC)
}
}
}
+
+ {
+ GCPHASE(VisitingDeadWeakHandles);
+ m_weakSet.visitDeadWeakImpls(heapRootVisitor);
+ }
+
GCCOUNTER(VisitedValueCount, visitor.visitCount());
visitor.doneCopying();
@@ -731,12 +678,12 @@ size_t Heap::objectCount()
size_t Heap::size()
{
- return m_objectSpace.forEachBlock<Size>();
+ return m_objectSpace.forEachBlock<Size>() + m_storageSpace.size();
}
size_t Heap::capacity()
{
- return m_objectSpace.forEachBlock<Capacity>();
+ return m_objectSpace.forEachBlock<Capacity>() + m_storageSpace.capacity();
}
size_t Heap::protectedGlobalObjectCount()
@@ -764,16 +711,27 @@ PassOwnPtr<TypeCountSet> Heap::objectTypeCounts()
return m_objectSpace.forEachCell<RecordType>();
}
+void Heap::discardAllCompiledCode()
+{
+ // If JavaScript is running, it's not safe to recompile, since we'll end
+ // up throwing away code that is live on the stack.
+ if (m_globalData->dynamicGlobalObject)
+ return;
+
+ for (FunctionExecutable* current = m_functions.head(); current; current = current->next())
+ current->discardCode();
+}
+
void Heap::collectAllGarbage()
{
if (!m_isSafeToCollect)
return;
- if (!m_globalData->dynamicGlobalObject)
- m_globalData->recompileAllJSFunctions();
collect(DoSweep);
}
+static double minute = 60.0;
+
void Heap::collect(SweepToggle sweepToggle)
{
SamplingRegion samplingRegion("Garbage Collection");
@@ -782,11 +740,19 @@ void Heap::collect(SweepToggle sweepToggle)
ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
ASSERT(m_isSafeToCollect);
JAVASCRIPTCORE_GC_BEGIN();
+
+ m_activityCallback->willCollect();
+
double lastGCStartTime = WTF::currentTime();
+ if (lastGCStartTime - m_lastCodeDiscardTime > minute) {
+ discardAllCompiledCode();
+ m_lastCodeDiscardTime = WTF::currentTime();
+ }
+
#if ENABLE(GGC)
bool fullGC = sweepToggle == DoSweep;
if (!fullGC)
- fullGC = (capacity() > 4 * m_lastFullGCSize);
+ fullGC = (capacity() > 4 * m_sizeAfterLastCollect);
#else
bool fullGC = true;
#endif
@@ -804,7 +770,7 @@ void Heap::collect(SweepToggle sweepToggle)
{
GCPHASE(FinalizeWeakHandles);
- m_handleHeap.finalizeWeakHandles();
+ m_weakSet.sweep();
m_globalData->smallStrings.finalizeSmallStrings();
}
@@ -824,24 +790,25 @@ void Heap::collect(SweepToggle sweepToggle)
SamplingRegion samplingRegion("Garbage Collection: Sweeping");
GCPHASE(Sweeping);
sweep();
- shrink();
+ m_objectSpace.shrink();
+ m_weakSet.shrink();
+ m_bytesAbandoned = 0;
}
- // To avoid pathological GC churn in large heaps, we set the allocation high
- // water mark to be proportional to the current size of the heap. The exact
- // proportion is a bit arbitrary. A 2X multiplier gives a 1:1 (heap size :
+ // To avoid pathological GC churn in large heaps, we set the new allocation
+ // limit to be the current size of the heap. This heuristic
+ // is a bit arbitrary. Using the current size of the heap after this
+ // collection gives us a 2X multiplier, which is a 1:1 (heap size :
// new bytes allocated) proportion, and seems to work well in benchmarks.
- size_t newSize = size() + m_storageSpace.totalMemoryUtilized();
- size_t proportionalBytes = 2 * newSize;
+ size_t newSize = size();
if (fullGC) {
- m_lastFullGCSize = newSize;
- setHighWaterMark(max(proportionalBytes, m_minBytesPerCycle));
+ m_sizeAfterLastCollect = newSize;
+ m_bytesAllocatedLimit = max(newSize, m_minBytesPerCycle);
}
+ m_bytesAllocated = 0;
double lastGCEndTime = WTF::currentTime();
m_lastGCLength = lastGCEndTime - lastGCStartTime;
JAVASCRIPTCORE_GC_END();
-
- (*m_activityCallback)();
}
void Heap::canonicalizeCellLivenessData()
@@ -851,8 +818,8 @@ void Heap::canonicalizeCellLivenessData()
void Heap::resetAllocators()
{
- m_extraCost = 0;
m_objectSpace.resetAllocators();
+ m_weakSet.resetAllocator();
}
void Heap::setActivityCallback(PassOwnPtr<GCActivityCallback> activityCallback)
@@ -865,6 +832,12 @@ GCActivityCallback* Heap::activityCallback()
return m_activityCallback.get();
}
+void Heap::didAllocate(size_t bytes)
+{
+ m_activityCallback->didAllocate(m_bytesAllocated + m_bytesAbandoned);
+ m_bytesAllocated += bytes;
+}
+
bool Heap::isValidAllocation(size_t bytes)
{
if (!isValidThreadState(m_globalData))
@@ -879,49 +852,27 @@ bool Heap::isValidAllocation(size_t bytes)
return true;
}
-void Heap::freeBlocks(MarkedBlock* head)
-{
- m_objectSpace.freeBlocks(head);
-}
-
-void Heap::shrink()
+void Heap::addFinalizer(JSCell* cell, Finalizer finalizer)
{
- m_objectSpace.shrink();
+ WeakSet::allocate(cell, &m_finalizerOwner, reinterpret_cast<void*>(finalizer)); // Balanced by FinalizerOwner::finalize().
}
-void Heap::releaseFreeBlocks()
+void Heap::FinalizerOwner::finalize(Handle<Unknown> handle, void* context)
{
- while (true) {
- MarkedBlock* block;
- {
- MutexLocker locker(m_freeBlockLock);
- if (!m_numberOfFreeBlocks)
- block = 0;
- else {
- block = static_cast<MarkedBlock*>(m_freeBlocks.removeHead());
- ASSERT(block);
- m_numberOfFreeBlocks--;
- }
- }
-
- if (!block)
- break;
-
- MarkedBlock::destroy(block);
- }
+ HandleSlot slot = handle.slot();
+ Finalizer finalizer = reinterpret_cast<Finalizer>(context);
+ finalizer(slot->asCell());
+ WeakSet::deallocate(WeakImpl::asWeakImpl(slot));
}
-void Heap::addFinalizer(JSCell* cell, Finalizer finalizer)
+void Heap::addFunctionExecutable(FunctionExecutable* executable)
{
- Weak<JSCell> weak(*globalData(), cell, &m_finalizerOwner, reinterpret_cast<void*>(finalizer));
- weak.leakHandle(); // Balanced by FinalizerOwner::finalize().
+ m_functions.append(executable);
}
-void Heap::FinalizerOwner::finalize(Handle<Unknown> handle, void* context)
+void Heap::removeFunctionExecutable(FunctionExecutable* executable)
{
- Weak<JSCell> weak(Weak<JSCell>::Adopt, handle);
- Finalizer finalizer = reinterpret_cast<Finalizer>(context);
- finalizer(weak.get());
+ m_functions.remove(executable);
}
} // namespace JSC
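
Note: shouldCollect() is declared in Heap.h below, but its body is not part of this excerpt. Given the counters introduced here (m_bytesAllocated, m_bytesAbandoned, m_bytesAllocatedLimit), one plausible reading of the trigger is sketched below; this is a guess for illustration, not the actual definition:

    // Abandoned bytes count toward the trigger so that dropping a large object
    // graph (reportAbandonedObjectGraph) hastens the next collection.
    bool shouldCollectSketch(size_t bytesAllocated, size_t bytesAbandoned,
                             size_t bytesAllocatedLimit, bool isSafeToCollect)
    {
        return isSafeToCollect
            && bytesAllocated + bytesAbandoned > bytesAllocatedLimit;
    }
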
diff --git a/Source/JavaScriptCore/heap/Heap.h b/Source/JavaScriptCore/heap/Heap.h
index 09a95882b..10fdb07be 100644
--- a/Source/JavaScriptCore/heap/Heap.h
+++ b/Source/JavaScriptCore/heap/Heap.h
@@ -22,17 +22,18 @@
#ifndef Heap_h
#define Heap_h
+#include "BlockAllocator.h"
#include "DFGCodeBlocks.h"
-#include "HandleHeap.h"
+#include "HandleSet.h"
#include "HandleStack.h"
#include "MarkedAllocator.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include "MarkedSpace.h"
#include "SlotVisitor.h"
+#include "WeakHandleOwner.h"
+#include "WeakSet.h"
#include "WriteBarrierSupport.h"
-#include <wtf/DoublyLinkedList.h>
-#include <wtf/Forward.h>
#include <wtf/HashCountedSet.h>
#include <wtf/HashSet.h>
@@ -42,6 +43,7 @@ namespace JSC {
class CopiedSpace;
class CodeBlock;
+ class FunctionExecutable;
class GCActivityCallback;
class GlobalCodeBlock;
class Heap;
@@ -71,8 +73,14 @@ namespace JSC {
public:
friend class JIT;
friend class MarkStackThreadSharedData;
- static Heap* heap(JSValue); // 0 for immediate values
- static Heap* heap(JSCell*);
+ static Heap* heap(const JSValue); // 0 for immediate values
+ static Heap* heap(const JSCell*);
+
+ // This constant determines how many blocks we iterate between checks of our
+ // deadline when calling Heap::isPagedOut. Decreasing it will cause us to detect
+ // overstepping our deadline more quickly, while increasing it will cause
+ // our scan to run faster.
+ static const unsigned s_timeCheckResolution = 16;
static bool isMarked(const void*);
static bool testAndSetMarked(const void*);
@@ -84,7 +92,7 @@ namespace JSC {
Heap(JSGlobalData*, HeapSize);
~Heap();
- JS_EXPORT_PRIVATE void destroy(); // JSGlobalData must call destroy() before ~Heap().
+ JS_EXPORT_PRIVATE void lastChanceToFinalize();
JSGlobalData* globalData() const { return m_globalData; }
MarkedSpace& objectSpace() { return m_objectSpace; }
@@ -105,11 +113,18 @@ namespace JSC {
typedef void (*Finalizer)(JSCell*);
JS_EXPORT_PRIVATE void addFinalizer(JSCell*, Finalizer);
+ void addFunctionExecutable(FunctionExecutable*);
+ void removeFunctionExecutable(FunctionExecutable*);
void notifyIsSafeToCollect() { m_isSafeToCollect = true; }
+
JS_EXPORT_PRIVATE void collectAllGarbage();
+ enum SweepToggle { DoNotSweep, DoSweep };
+ bool shouldCollect();
+ void collect(SweepToggle);
void reportExtraMemoryCost(size_t cost);
+ JS_EXPORT_PRIVATE void reportAbandonedObjectGraph();
JS_EXPORT_PRIVATE void protect(JSValue);
JS_EXPORT_PRIVATE bool unprotect(JSValue); // True when the protect count drops to 0.
@@ -133,12 +148,21 @@ namespace JSC {
template<typename Functor> typename Functor::ReturnType forEachProtectedCell(Functor&);
template<typename Functor> typename Functor::ReturnType forEachProtectedCell();
- HandleHeap* handleHeap() { return &m_handleHeap; }
+ WeakSet* weakSet() { return &m_weakSet; }
+ HandleSet* handleSet() { return &m_handleSet; }
HandleStack* handleStack() { return &m_handleStack; }
void getConservativeRegisterRoots(HashSet<JSCell*>& roots);
double lastGCLength() { return m_lastGCLength; }
+ void increaseLastGCLength(double amount) { m_lastGCLength += amount; }
+
+ JS_EXPORT_PRIVATE void discardAllCompiledCode();
+
+ void didAllocate(size_t);
+ void didAbandon(size_t);
+
+ bool isPagedOut(double deadline);
private:
friend class CodeBlock;
@@ -153,10 +177,6 @@ namespace JSC {
void* allocateWithDestructor(size_t);
void* allocateWithoutDestructor(size_t);
- size_t waterMark();
- size_t highWaterMark();
- void setHighWaterMark(size_t);
-
static const size_t minExtraCost = 256;
static const size_t maxExtraCost = 1024 * 1024;
@@ -173,7 +193,6 @@ namespace JSC {
void canonicalizeCellLivenessData();
void resetAllocators();
- void freeBlocks(MarkedBlock*);
void clearMarks();
void markRoots(bool fullGC);
@@ -182,43 +201,29 @@ namespace JSC {
void harvestWeakReferences();
void finalizeUnconditionalFinalizers();
- enum SweepToggle { DoNotSweep, DoSweep };
- void collect(SweepToggle);
- void shrink();
- void releaseFreeBlocks();
void sweep();
RegisterFile& registerFile();
+ BlockAllocator& blockAllocator();
- void waitForRelativeTimeWhileHoldingLock(double relative);
- void waitForRelativeTime(double relative);
- void blockFreeingThreadMain();
- static void blockFreeingThreadStartFunc(void* heap);
-
const HeapSize m_heapSize;
const size_t m_minBytesPerCycle;
- size_t m_lastFullGCSize;
- size_t m_waterMark;
- size_t m_highWaterMark;
+ size_t m_sizeAfterLastCollect;
+
+ size_t m_bytesAllocatedLimit;
+ size_t m_bytesAllocated;
+ size_t m_bytesAbandoned;
OperationInProgress m_operationInProgress;
MarkedSpace m_objectSpace;
CopiedSpace m_storageSpace;
- DoublyLinkedList<HeapBlock> m_freeBlocks;
- size_t m_numberOfFreeBlocks;
-
- ThreadIdentifier m_blockFreeingThread;
- Mutex m_freeBlockLock;
- ThreadCondition m_freeBlockCondition;
- bool m_blockFreeingThreadShouldQuit;
+ BlockAllocator m_blockAllocator;
#if ENABLE(SIMPLE_HEAP_PROFILING)
VTableSpectrum m_destroyedTypeCounts;
#endif
- size_t m_extraCost;
-
ProtectCountSet m_protectedValues;
Vector<Vector<ValueStringPair>* > m_tempSortingVectors;
HashSet<MarkedArgumentBuffer*>* m_markListSet;
@@ -230,7 +235,8 @@ namespace JSC {
MarkStackThreadSharedData m_sharedData;
SlotVisitor m_slotVisitor;
- HandleHeap m_handleHeap;
+ WeakSet m_weakSet;
+ HandleSet m_handleSet;
HandleStack m_handleStack;
DFGCodeBlocks m_dfgCodeBlocks;
FinalizerOwner m_finalizerOwner;
@@ -239,19 +245,31 @@ namespace JSC {
JSGlobalData* m_globalData;
double m_lastGCLength;
+ double m_lastCodeDiscardTime;
+
+ DoublyLinkedList<FunctionExecutable> m_functions;
};
+ inline bool Heap::shouldCollect()
+ {
+#if ENABLE(GGC)
+ return m_objectSpace.nurseryWaterMark() >= m_minBytesPerCycle && m_isSafeToCollect;
+#else
+ return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect;
+#endif
+ }
+
bool Heap::isBusy()
{
return m_operationInProgress != NoOperation;
}
- inline Heap* Heap::heap(JSCell* cell)
+ inline Heap* Heap::heap(const JSCell* cell)
{
return MarkedBlock::blockFor(cell)->heap();
}
- inline Heap* Heap::heap(JSValue v)
+ inline Heap* Heap::heap(const JSValue v)
{
if (!v.isCell())
return 0;
@@ -273,21 +291,6 @@ namespace JSC {
MarkedBlock::blockFor(cell)->setMarked(cell);
}
- inline size_t Heap::waterMark()
- {
- return m_objectSpace.waterMark() + m_storageSpace.totalMemoryUtilized();
- }
-
- inline size_t Heap::highWaterMark()
- {
- return m_highWaterMark;
- }
-
- inline void Heap::setHighWaterMark(size_t newHighWaterMark)
- {
- m_highWaterMark = newHighWaterMark;
- }
-
#if ENABLE(GGC)
inline uint8_t* Heap::addressOfCardFor(JSCell* cell)
{
@@ -334,7 +337,7 @@ namespace JSC {
ProtectCountSet::iterator end = m_protectedValues.end();
for (ProtectCountSet::iterator it = m_protectedValues.begin(); it != end; ++it)
functor(it->first);
- m_handleHeap.forEachStrongHandle(functor, m_protectedValues);
+ m_handleSet.forEachStrongHandle(functor, m_protectedValues);
return functor.returnValue();
}
@@ -367,6 +370,11 @@ namespace JSC {
return m_storageSpace.tryReallocate(ptr, oldSize, newSize);
}
+ inline BlockAllocator& Heap::blockAllocator()
+ {
+ return m_blockAllocator;
+ }
+
} // namespace JSC
#endif // Heap_h
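The Heap.h changes above replace the water-mark bookkeeping with explicit byte counting: allocators report what they hand out through didAllocate(), and shouldCollect() fires once the running total passes m_bytesAllocatedLimit. A minimal sketch of that trigger, using a stand-in struct and an illustrative limit rather than the real Heap internals:

#include <cstddef>

// Stand-in for the new accounting; not the JSC Heap class.
struct HeapSketch {
    size_t bytesAllocated;      // bumped by didAllocate()
    size_t bytesAllocatedLimit; // recomputed after each collection
    bool isSafeToCollect;

    HeapSketch() : bytesAllocated(0), bytesAllocatedLimit(512 * 1024), isSafeToCollect(true) { }

    void didAllocate(size_t bytes) { bytesAllocated += bytes; }

    // Mirrors the non-GGC branch of Heap::shouldCollect().
    bool shouldCollect() const { return bytesAllocated > bytesAllocatedLimit && isSafeToCollect; }
};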
diff --git a/Source/JavaScriptCore/heap/Local.h b/Source/JavaScriptCore/heap/Local.h
index afcfe42b8..5d1f06439 100644
--- a/Source/JavaScriptCore/heap/Local.h
+++ b/Source/JavaScriptCore/heap/Local.h
@@ -102,7 +102,7 @@ template <typename T, unsigned inlineCapacity = 0> class LocalStack {
typedef typename Handle<T>::ExternalType ExternalType;
public:
LocalStack(JSGlobalData& globalData)
- : m_globalData(&globalData)
+ : m_globalData(globalData)
, m_count(0)
{
}
@@ -122,7 +122,7 @@ public:
void push(ExternalType value)
{
if (m_count == m_stack.size())
- m_stack.append(Local<T>(*m_globalData, value));
+ m_stack.append(Local<T>(m_globalData, value));
else
m_stack[m_count] = value;
m_count++;
@@ -132,7 +132,7 @@ public:
unsigned size() const { return m_count; }
private:
- RefPtr<JSGlobalData> m_globalData;
+ JSGlobalData& m_globalData;
Vector<Local<T>, inlineCapacity> m_stack;
unsigned m_count;
};
diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.cpp b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
index fd828d5de..30915eaf8 100644
--- a/Source/JavaScriptCore/heap/MachineStackMarker.cpp
+++ b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
@@ -96,6 +96,7 @@ typedef HANDLE PlatformThread;
typedef pthread_t PlatformThread;
static const int SigThreadSuspendResume = SIGUSR2;
+#if defined(SA_RESTART)
static void pthreadSignalHandlerSuspendResume(int signo)
{
sigset_t signalSet;
@@ -104,6 +105,7 @@ static void pthreadSignalHandlerSuspendResume(int signo)
sigsuspend(&signalSet);
}
#endif
+#endif
class MachineThreads::Thread {
public:
diff --git a/Source/JavaScriptCore/heap/MarkStack.cpp b/Source/JavaScriptCore/heap/MarkStack.cpp
index 129a7ab67..cf6e3513c 100644
--- a/Source/JavaScriptCore/heap/MarkStack.cpp
+++ b/Source/JavaScriptCore/heap/MarkStack.cpp
@@ -486,16 +486,6 @@ void* SlotVisitor::allocateNewSpace(void* ptr, size_t bytes)
return CopiedSpace::allocateFromBlock(m_copyBlock, bytes);
}
-void SlotVisitor::copy(void** ptr, size_t bytes)
-{
- void* newPtr = 0;
- if (!(newPtr = allocateNewSpace(*ptr, bytes)))
- return;
-
- memcpy(newPtr, *ptr, bytes);
- *ptr = newPtr;
-}
-
void SlotVisitor::copyAndAppend(void** ptr, size_t bytes, JSValue* values, unsigned length)
{
void* oldPtr = *ptr;
@@ -503,7 +493,7 @@ void SlotVisitor::copyAndAppend(void** ptr, size_t bytes, JSValue* values, unsigned length)
if (newPtr) {
size_t jsValuesOffset = static_cast<size_t>(reinterpret_cast<char*>(values) - static_cast<char*>(oldPtr));
- JSValue* newValues = reinterpret_cast<JSValue*>(static_cast<char*>(newPtr) + jsValuesOffset);
+ JSValue* newValues = reinterpret_cast_ptr<JSValue*>(static_cast<char*>(newPtr) + jsValuesOffset);
for (unsigned i = 0; i < length; i++) {
JSValue& value = values[i];
newValues[i] = value;
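copyAndAppend() above re-derives the relocated JSValue array from its byte offset inside the old backing store. A stand-alone sketch of that offset math, using plain malloc'd memory and ints in place of copied-space blocks and JSValues:

#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <cstring>

int main()
{
    // Old allocation with an interior pointer into it.
    char* oldPtr = static_cast<char*>(std::malloc(64));
    int* values = reinterpret_cast<int*>(oldPtr + 16); // interior array
    values[0] = 42;

    // Relocate the allocation, then recompute the interior pointer from its old offset.
    char* newPtr = static_cast<char*>(std::malloc(64));
    std::memcpy(newPtr, oldPtr, 64);
    size_t offset = reinterpret_cast<char*>(values) - oldPtr;
    int* newValues = reinterpret_cast<int*>(newPtr + offset);

    std::printf("%d\n", newValues[0]); // 42: the interior pointer survived the move
    std::free(oldPtr);
    std::free(newPtr);
    return 0;
}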
diff --git a/Source/JavaScriptCore/heap/MarkedAllocator.cpp b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
index eb6d2c691..b5e5fff77 100644
--- a/Source/JavaScriptCore/heap/MarkedAllocator.cpp
+++ b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
@@ -1,29 +1,48 @@
#include "config.h"
#include "MarkedAllocator.h"
+#include "GCActivityCallback.h"
#include "Heap.h"
+#include <wtf/CurrentTime.h>
namespace JSC {
+bool MarkedAllocator::isPagedOut(double deadline)
+{
+ unsigned itersSinceLastTimeCheck = 0;
+ HeapBlock* block = m_blockList.head();
+ while (block) {
+ block = block->next();
+ ++itersSinceLastTimeCheck;
+ if (itersSinceLastTimeCheck >= Heap::s_timeCheckResolution) {
+ double currentTime = WTF::monotonicallyIncreasingTime();
+ if (currentTime > deadline)
+ return true;
+ itersSinceLastTimeCheck = 0;
+ }
+ }
+
+ return false;
+}
+
inline void* MarkedAllocator::tryAllocateHelper()
{
- MarkedBlock::FreeCell* firstFreeCell = m_firstFreeCell;
- if (!firstFreeCell) {
+ if (!m_freeList.head) {
for (MarkedBlock*& block = m_currentBlock; block; block = static_cast<MarkedBlock*>(block->next())) {
- firstFreeCell = block->sweep(MarkedBlock::SweepToFreeList);
- if (firstFreeCell)
+ m_freeList = block->sweep(MarkedBlock::SweepToFreeList);
+ if (m_freeList.head)
break;
- m_markedSpace->didConsumeFreeList(block);
block->didConsumeFreeList();
}
- if (!firstFreeCell)
+ if (!m_freeList.head)
return 0;
}
- ASSERT(firstFreeCell);
- m_firstFreeCell = firstFreeCell->next;
- return firstFreeCell;
+ MarkedBlock::FreeCell* head = m_freeList.head;
+ m_freeList.head = head->next;
+ ASSERT(head);
+ return head;
}
inline void* MarkedAllocator::tryAllocate()
@@ -41,6 +60,9 @@ void* MarkedAllocator::allocateSlowCase()
ASSERT(m_heap->m_operationInProgress == NoOperation);
#endif
+ ASSERT(!m_freeList.head);
+ m_heap->didAllocate(m_freeList.bytes);
+
void* result = tryAllocate();
if (LIKELY(result != 0))
@@ -48,16 +70,10 @@ void* MarkedAllocator::allocateSlowCase()
AllocationEffort allocationEffort;
- if ((
-#if ENABLE(GGC)
- nurseryWaterMark() < m_heap->m_minBytesPerCycle
-#else
- m_heap->waterMark() < m_heap->highWaterMark()
-#endif
- ) || !m_heap->m_isSafeToCollect)
- allocationEffort = AllocationMustSucceed;
- else
+ if (m_heap->shouldCollect())
allocationEffort = AllocationCanFail;
+ else
+ allocationEffort = AllocationMustSucceed;
MarkedBlock* block = allocateBlock(allocationEffort);
if (block) {
@@ -74,7 +90,7 @@ void* MarkedAllocator::allocateSlowCase()
if (result)
return result;
- ASSERT(m_heap->waterMark() < m_heap->highWaterMark());
+ ASSERT(!m_heap->shouldCollect());
addBlock(allocateBlock(AllocationMustSucceed));
@@ -85,17 +101,7 @@ void* MarkedAllocator::allocateSlowCase()
MarkedBlock* MarkedAllocator::allocateBlock(AllocationEffort allocationEffort)
{
- MarkedBlock* block;
-
- {
- MutexLocker locker(m_heap->m_freeBlockLock);
- if (m_heap->m_numberOfFreeBlocks) {
- block = static_cast<MarkedBlock*>(m_heap->m_freeBlocks.removeHead());
- ASSERT(block);
- m_heap->m_numberOfFreeBlocks--;
- } else
- block = 0;
- }
+ MarkedBlock* block = static_cast<MarkedBlock*>(m_heap->blockAllocator().allocate());
if (block)
block = MarkedBlock::recycle(block, m_heap, m_cellSize, m_cellsNeedDestruction);
else if (allocationEffort == AllocationCanFail)
@@ -111,11 +117,11 @@ MarkedBlock* MarkedAllocator::allocateBlock(AllocationEffort allocationEffort)
void MarkedAllocator::addBlock(MarkedBlock* block)
{
ASSERT(!m_currentBlock);
- ASSERT(!m_firstFreeCell);
+ ASSERT(!m_freeList.head);
m_blockList.append(block);
m_currentBlock = block;
- m_firstFreeCell = block->sweep(MarkedBlock::SweepToFreeList);
+ m_freeList = block->sweep(MarkedBlock::SweepToFreeList);
}
void MarkedAllocator::removeBlock(MarkedBlock* block)
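MarkedAllocator::isPagedOut() above touches every block header in the list but consults the clock only every Heap::s_timeCheckResolution (16) iterations; if the walk overruns the deadline, the blocks are assumed to be paged out. A self-contained sketch of that cadence, using a stand-in list node and std::chrono in place of WTF::monotonicallyIncreasingTime():

#include <chrono>

struct BlockNode { BlockNode* next; };

static double monotonicSeconds()
{
    using namespace std::chrono;
    return duration<double>(steady_clock::now().time_since_epoch()).count();
}

bool isPagedOutSketch(BlockNode* head, double deadline, unsigned timeCheckResolution = 16)
{
    unsigned itersSinceLastTimeCheck = 0;
    for (BlockNode* block = head; block; block = block->next) {
        if (++itersSinceLastTimeCheck >= timeCheckResolution) {
            if (monotonicSeconds() > deadline)
                return true; // walking the blocks took too long: treat them as paged out
            itersSinceLastTimeCheck = 0;
        }
    }
    return false;
}

int main()
{
    BlockNode c = { 0 }, b = { &c }, a = { &b };
    return isPagedOutSketch(&a, monotonicSeconds() + 0.1) ? 1 : 0; // 100ms budget
}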
diff --git a/Source/JavaScriptCore/heap/MarkedAllocator.h b/Source/JavaScriptCore/heap/MarkedAllocator.h
index 1c6af77a2..8ad7e925f 100644
--- a/Source/JavaScriptCore/heap/MarkedAllocator.h
+++ b/Source/JavaScriptCore/heap/MarkedAllocator.h
@@ -32,7 +32,9 @@ public:
void addBlock(MarkedBlock*);
void removeBlock(MarkedBlock*);
void init(Heap*, MarkedSpace*, size_t cellSize, bool cellsNeedDestruction);
-
+
+ bool isPagedOut(double deadline);
+
private:
friend class LLIntOffsetsExtractor;
@@ -41,7 +43,7 @@ private:
void* tryAllocateHelper();
MarkedBlock* allocateBlock(AllocationEffort);
- MarkedBlock::FreeCell* m_firstFreeCell;
+ MarkedBlock::FreeList m_freeList;
MarkedBlock* m_currentBlock;
DoublyLinkedList<HeapBlock> m_blockList;
size_t m_cellSize;
@@ -51,8 +53,7 @@ private:
};
inline MarkedAllocator::MarkedAllocator()
- : m_firstFreeCell(0)
- , m_currentBlock(0)
+ : m_currentBlock(0)
, m_cellSize(0)
, m_cellsNeedDestruction(true)
, m_heap(0)
@@ -70,13 +71,13 @@ inline void MarkedAllocator::init(Heap* heap, MarkedSpace* markedSpace, size_t cellSize, bool cellsNeedDestruction)
inline void* MarkedAllocator::allocate()
{
- MarkedBlock::FreeCell* firstFreeCell = m_firstFreeCell;
+ MarkedBlock::FreeCell* head = m_freeList.head;
// This is a light-weight fast path to cover the most common case.
- if (UNLIKELY(!firstFreeCell))
+ if (UNLIKELY(!head))
return allocateSlowCase();
- m_firstFreeCell = firstFreeCell->next;
- return firstFreeCell;
+ m_freeList.head = head->next;
+ return head;
}
inline void MarkedAllocator::reset()
@@ -87,12 +88,12 @@ inline void MarkedAllocator::reset()
inline void MarkedAllocator::zapFreeList()
{
if (!m_currentBlock) {
- ASSERT(!m_firstFreeCell);
+ ASSERT(!m_freeList.head);
return;
}
- m_currentBlock->zapFreeList(m_firstFreeCell);
- m_firstFreeCell = 0;
+ m_currentBlock->zapFreeList(m_freeList);
+ m_freeList = MarkedBlock::FreeList();
}
template <typename Functor> inline void MarkedAllocator::forEachBlock(Functor& functor)
diff --git a/Source/JavaScriptCore/heap/MarkedBlock.cpp b/Source/JavaScriptCore/heap/MarkedBlock.cpp
index 75c21e7dd..3a58b5a42 100644
--- a/Source/JavaScriptCore/heap/MarkedBlock.cpp
+++ b/Source/JavaScriptCore/heap/MarkedBlock.cpp
@@ -77,7 +77,7 @@ inline void MarkedBlock::callDestructor(JSCell* cell)
}
template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode, bool destructorCallNeeded>
-MarkedBlock::FreeCell* MarkedBlock::specializedSweep()
+MarkedBlock::FreeList MarkedBlock::specializedSweep()
{
ASSERT(blockState != Allocated && blockState != FreeListed);
ASSERT(destructorCallNeeded || sweepMode != SweepOnly);
@@ -86,6 +86,7 @@ MarkedBlock::FreeCell* MarkedBlock::specializedSweep()
// This is fine, since the allocation code makes no assumptions about the
// order of the free list.
FreeCell* head = 0;
+ size_t count = 0;
for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) {
if (blockState == Marked && m_marks.get(i))
continue;
@@ -101,19 +102,20 @@ MarkedBlock::FreeCell* MarkedBlock::specializedSweep()
FreeCell* freeCell = reinterpret_cast<FreeCell*>(cell);
freeCell->next = head;
head = freeCell;
+ ++count;
}
}
m_state = ((sweepMode == SweepToFreeList) ? FreeListed : Zapped);
- return head;
+ return FreeList(head, count * cellSize());
}
-MarkedBlock::FreeCell* MarkedBlock::sweep(SweepMode sweepMode)
+MarkedBlock::FreeList MarkedBlock::sweep(SweepMode sweepMode)
{
HEAP_LOG_BLOCK_STATE_TRANSITION(this);
if (sweepMode == SweepOnly && !m_cellsNeedDestruction)
- return 0;
+ return FreeList();
if (m_cellsNeedDestruction)
return sweepHelper<true>(sweepMode);
@@ -121,7 +123,7 @@ MarkedBlock::FreeCell* MarkedBlock::sweep(SweepMode sweepMode)
}
template<bool destructorCallNeeded>
-MarkedBlock::FreeCell* MarkedBlock::sweepHelper(SweepMode sweepMode)
+MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode)
{
switch (m_state) {
case New:
@@ -130,10 +132,10 @@ MarkedBlock::FreeCell* MarkedBlock::sweepHelper(SweepMode sweepMode)
case FreeListed:
// Happens when a block transitions to fully allocated.
ASSERT(sweepMode == SweepToFreeList);
- return 0;
+ return FreeList();
case Allocated:
ASSERT_NOT_REACHED();
- return 0;
+ return FreeList();
case Marked:
return sweepMode == SweepToFreeList
? specializedSweep<Marked, SweepToFreeList, destructorCallNeeded>()
@@ -145,12 +147,13 @@ MarkedBlock::FreeCell* MarkedBlock::sweepHelper(SweepMode sweepMode)
}
ASSERT_NOT_REACHED();
- return 0;
+ return FreeList();
}
-void MarkedBlock::zapFreeList(FreeCell* firstFreeCell)
+void MarkedBlock::zapFreeList(const FreeList& freeList)
{
HEAP_LOG_BLOCK_STATE_TRANSITION(this);
+ FreeCell* head = freeList.head;
if (m_state == Marked) {
// If the block is in the Marked state then we know that:
@@ -159,7 +162,7 @@ void MarkedBlock::zapFreeList(FreeCell* firstFreeCell)
// fact that their mark bits are unset.
// Hence if the block is Marked we need to leave it Marked.
- ASSERT(!firstFreeCell);
+ ASSERT(!head);
return;
}
@@ -176,7 +179,7 @@ void MarkedBlock::zapFreeList(FreeCell* firstFreeCell)
// dead objects will have 0 in their vtables and live objects will have
// non-zero vtables, which is consistent with the block being zapped.
- ASSERT(!firstFreeCell);
+ ASSERT(!head);
return;
}
@@ -188,7 +191,7 @@ void MarkedBlock::zapFreeList(FreeCell* firstFreeCell)
// way to tell what's live vs dead. We use zapping for that.
FreeCell* next;
- for (FreeCell* current = firstFreeCell; current; current = next) {
+ for (FreeCell* current = head; current; current = next) {
next = current->next;
reinterpret_cast<JSCell*>(current)->zap();
}
diff --git a/Source/JavaScriptCore/heap/MarkedBlock.h b/Source/JavaScriptCore/heap/MarkedBlock.h
index 3d0182eb8..429b7c08e 100644
--- a/Source/JavaScriptCore/heap/MarkedBlock.h
+++ b/Source/JavaScriptCore/heap/MarkedBlock.h
@@ -87,6 +87,14 @@ namespace JSC {
FreeCell* next;
};
+ struct FreeList {
+ FreeCell* head;
+ size_t bytes;
+
+ FreeList();
+ FreeList(FreeCell*, size_t);
+ };
+
struct VoidFunctor {
typedef void ReturnType;
void returnValue() { }
@@ -105,13 +113,13 @@ namespace JSC {
void* allocate();
enum SweepMode { SweepOnly, SweepToFreeList };
- FreeCell* sweep(SweepMode = SweepOnly);
+ FreeList sweep(SweepMode = SweepOnly);
// While allocating from a free list, MarkedBlock temporarily has bogus
// cell liveness data. To restore accurate cell liveness data, call one
// of these functions:
void didConsumeFreeList(); // Call this once you've allocated all the items in the free list.
- void zapFreeList(FreeCell* firstFreeCell); // Call this to undo the free list.
+ void zapFreeList(const FreeList&); // Call this to undo the free list.
void clearMarks();
size_t markCount();
@@ -163,7 +171,7 @@ namespace JSC {
static const size_t atomAlignmentMask = atomSize - 1; // atomSize must be a power of two.
enum BlockState { New, FreeListed, Allocated, Marked, Zapped };
- template<bool destructorCallNeeded> FreeCell* sweepHelper(SweepMode = SweepOnly);
+ template<bool destructorCallNeeded> FreeList sweepHelper(SweepMode = SweepOnly);
typedef char Atom[atomSize];
@@ -171,7 +179,7 @@ namespace JSC {
Atom* atoms();
size_t atomNumber(const void*);
void callDestructor(JSCell*);
- template<BlockState, SweepMode, bool destructorCallNeeded> FreeCell* specializedSweep();
+ template<BlockState, SweepMode, bool destructorCallNeeded> FreeList specializedSweep();
#if ENABLE(GGC)
CardSet<bytesPerCard, blockSize> m_cards;
@@ -189,6 +197,18 @@ namespace JSC {
Heap* m_heap;
};
+ inline MarkedBlock::FreeList::FreeList()
+ : head(0)
+ , bytes(0)
+ {
+ }
+
+ inline MarkedBlock::FreeList::FreeList(FreeCell* head, size_t bytes)
+ : head(head)
+ , bytes(bytes)
+ {
+ }
+
inline size_t MarkedBlock::firstAtom()
{
return WTF::roundUpToMultipleOf<atomSize>(sizeof(MarkedBlock)) / atomSize;
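sweep(SweepToFreeList) now returns a FreeList instead of a bare cell pointer, so the allocator knows how many bytes the list represents (count * cellSize()) and can hand that number to Heap::didAllocate() once the list is consumed. A minimal sketch of that pairing with stand-in cells:

#include <cstddef>
#include <cstdio>

struct FreeCell { FreeCell* next; };

struct FreeList {
    FreeCell* head;
    size_t bytes; // lets the allocator report consumed free space to Heap::didAllocate()
    FreeList() : head(0), bytes(0) { }
    FreeList(FreeCell* head, size_t bytes) : head(head), bytes(bytes) { }
};

int main()
{
    const size_t cellSize = 32;
    FreeCell cells[4];
    FreeCell* head = 0;
    size_t count = 0;
    for (size_t i = 0; i < 4; ++i) { // pretend all four cells were swept up dead
        cells[i].next = head;
        head = &cells[i];
        ++count;
    }
    FreeList freeList(head, count * cellSize);
    std::printf("%zu free bytes\n", freeList.bytes); // 128
    return 0;
}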
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.cpp b/Source/JavaScriptCore/heap/MarkedSpace.cpp
index bf839011d..405ed571a 100644
--- a/Source/JavaScriptCore/heap/MarkedSpace.cpp
+++ b/Source/JavaScriptCore/heap/MarkedSpace.cpp
@@ -31,9 +31,7 @@ namespace JSC {
class Structure;
MarkedSpace::MarkedSpace(Heap* heap)
- : m_waterMark(0)
- , m_nurseryWaterMark(0)
- , m_heap(heap)
+ : m_heap(heap)
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
allocatorFor(cellSize).init(heap, this, cellSize, false);
@@ -48,9 +46,6 @@ MarkedSpace::MarkedSpace(Heap* heap)
void MarkedSpace::resetAllocators()
{
- m_waterMark = 0;
- m_nurseryWaterMark = 0;
-
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
allocatorFor(cellSize).reset();
destructorAllocatorFor(cellSize).reset();
@@ -75,6 +70,20 @@ void MarkedSpace::canonicalizeCellLivenessData()
}
}
+bool MarkedSpace::isPagedOut(double deadline)
+{
+ for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+ if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+ return true;
+ }
+
+ for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+ if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+ return true;
+ }
+
+ return false;
+}
void MarkedSpace::freeBlocks(MarkedBlock* head)
{
@@ -84,9 +93,8 @@ void MarkedSpace::freeBlocks(MarkedBlock* head)
m_blocks.remove(block);
block->sweep();
- MutexLocker locker(m_heap->m_freeBlockLock);
- m_heap->m_freeBlocks.append(block);
- m_heap->m_numberOfFreeBlocks++;
+
+ m_heap->blockAllocator().deallocate(block);
}
}
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.h b/Source/JavaScriptCore/heap/MarkedSpace.h
index b553eb1b1..19061a12b 100644
--- a/Source/JavaScriptCore/heap/MarkedSpace.h
+++ b/Source/JavaScriptCore/heap/MarkedSpace.h
@@ -65,9 +65,6 @@ public:
void canonicalizeCellLivenessData();
- size_t waterMark();
- size_t nurseryWaterMark();
-
typedef HashSet<MarkedBlock*>::iterator BlockIterator;
template<typename Functor> typename Functor::ReturnType forEachCell(Functor&);
@@ -77,9 +74,12 @@ public:
void shrink();
void freeBlocks(MarkedBlock* head);
+
void didAddBlock(MarkedBlock*);
void didConsumeFreeList(MarkedBlock*);
+ bool isPagedOut(double deadline);
+
private:
friend class LLIntOffsetsExtractor;
@@ -101,22 +101,10 @@ private:
Subspace m_destructorSpace;
Subspace m_normalSpace;
- size_t m_waterMark;
- size_t m_nurseryWaterMark;
Heap* m_heap;
MarkedBlockSet m_blocks;
};
-inline size_t MarkedSpace::waterMark()
-{
- return m_waterMark;
-}
-
-inline size_t MarkedSpace::nurseryWaterMark()
-{
- return m_nurseryWaterMark;
-}
-
template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachCell(Functor& functor)
{
canonicalizeCellLivenessData();
@@ -197,12 +185,6 @@ inline void MarkedSpace::didAddBlock(MarkedBlock* block)
m_blocks.add(block);
}
-inline void MarkedSpace::didConsumeFreeList(MarkedBlock* block)
-{
- m_nurseryWaterMark += block->capacity() - block->size();
- m_waterMark += block->capacity();
-}
-
} // namespace JSC
#endif // MarkedSpace_h
diff --git a/Source/JavaScriptCore/heap/PassWeak.h b/Source/JavaScriptCore/heap/PassWeak.h
index 0d86e6c65..8c6364e4b 100644
--- a/Source/JavaScriptCore/heap/PassWeak.h
+++ b/Source/JavaScriptCore/heap/PassWeak.h
@@ -26,8 +26,9 @@
#ifndef PassWeak_h
#define PassWeak_h
+#include "JSCell.h"
+#include "WeakSetInlines.h"
#include <wtf/Assertions.h>
-#include "Handle.h"
#include <wtf/NullPtr.h>
#include <wtf/TypeTraits.h>
@@ -35,61 +36,136 @@ namespace JSC {
template<typename T> class Weak;
template<typename T> class PassWeak;
-template<typename T> PassWeak<T> adoptWeak(HandleSlot);
-
-template<typename T> class PassWeak : public Handle<T> {
- using Handle<T>::slot;
- using Handle<T>::setSlot;
+template<typename T> PassWeak<T> adoptWeak(WeakImpl*);
+template<typename Base, typename T> class WeakImplAccessor {
public:
- typedef typename Handle<T>::ExternalType ExternalType;
+ typedef T* GetType;
+
+ T* operator->() const;
+ T& operator*() const;
+ GetType get() const;
+
+#if !ASSERT_DISABLED
+ bool was(GetType) const;
+#endif
+};
- PassWeak() : Handle<T>() { }
- PassWeak(std::nullptr_t) : Handle<T>() { }
+template<typename T> class PassWeak : public WeakImplAccessor<PassWeak<T>, T> {
+public:
+ friend class WeakImplAccessor<PassWeak<T>, T>;
+ typedef typename WeakImplAccessor<PassWeak<T>, T>::GetType GetType;
- PassWeak(JSGlobalData& globalData, ExternalType externalType = ExternalType(), WeakHandleOwner* weakOwner = 0, void* context = 0)
- : Handle<T>(globalData.heap.handleHeap()->allocate())
- {
- HandleHeap::heapFor(slot())->makeWeak(slot(), weakOwner, context);
- JSValue value = HandleTypes<T>::toJSValue(externalType);
- HandleHeap::heapFor(slot())->writeBarrier(slot(), value);
- *slot() = value;
- }
+ PassWeak();
+ PassWeak(std::nullptr_t);
+ PassWeak(GetType, WeakHandleOwner* = 0, void* context = 0);
// It somewhat breaks the type system to allow transfer of ownership out of
// a const PassWeak. However, it makes it much easier to work with PassWeak
// temporaries, and we don't have a need to use real const PassWeaks anyway.
- PassWeak(const PassWeak& o) : Handle<T>(o.leakHandle()) { }
- template<typename U> PassWeak(const PassWeak<U>& o) : Handle<T>(o.leakHandle()) { }
+ PassWeak(const PassWeak&);
+ template<typename U> PassWeak(const PassWeak<U>&);
+
+ ~PassWeak();
- ~PassWeak()
- {
- if (!slot())
- return;
- HandleHeap::heapFor(slot())->deallocate(slot());
- setSlot(0);
- }
+ bool operator!() const;
- ExternalType get() const { return HandleTypes<T>::getFromSlot(slot()); }
+ // This conversion operator allows implicit conversion to bool but not to other integer types.
+ typedef JSValue (PassWeak::*UnspecifiedBoolType);
+ operator UnspecifiedBoolType*() const;
- HandleSlot leakHandle() const WARN_UNUSED_RETURN;
+ WeakImpl* leakImpl() const WARN_UNUSED_RETURN;
private:
- friend PassWeak adoptWeak<T>(HandleSlot);
+ friend PassWeak adoptWeak<T>(WeakImpl*);
+ explicit PassWeak(WeakImpl*);
- explicit PassWeak(HandleSlot slot) : Handle<T>(slot) { }
+ WeakImpl* m_impl;
};
-template<typename T> inline HandleSlot PassWeak<T>::leakHandle() const
+template<typename Base, typename T> inline T* WeakImplAccessor<Base, T>::operator->() const
+{
+ ASSERT(static_cast<const Base*>(this)->m_impl && static_cast<const Base*>(this)->m_impl->state() == WeakImpl::Live);
+ return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
+}
+
+template<typename Base, typename T> inline T& WeakImplAccessor<Base, T>::operator*() const
+{
+ ASSERT(static_cast<const Base*>(this)->m_impl && static_cast<const Base*>(this)->m_impl->state() == WeakImpl::Live);
+ return *jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
+}
+
+template<typename Base, typename T> inline typename WeakImplAccessor<Base, T>::GetType WeakImplAccessor<Base, T>::get() const
+{
+ if (!static_cast<const Base*>(this)->m_impl || static_cast<const Base*>(this)->m_impl->state() != WeakImpl::Live)
+ return GetType();
+ return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
+}
+
+#if !ASSERT_DISABLED
+template<typename Base, typename T> inline bool WeakImplAccessor<Base, T>::was(typename WeakImplAccessor<Base, T>::GetType other) const
+{
+ return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell()) == other;
+}
+#endif
+
+template<typename T> inline PassWeak<T>::PassWeak()
+ : m_impl(0)
+{
+}
+
+template<typename T> inline PassWeak<T>::PassWeak(std::nullptr_t)
+ : m_impl(0)
+{
+}
+
+template<typename T> inline PassWeak<T>::PassWeak(typename PassWeak<T>::GetType getType, WeakHandleOwner* weakOwner, void* context)
+ : m_impl(getType ? WeakSet::allocate(getType, weakOwner, context) : 0)
+{
+}
+
+template<typename T> inline PassWeak<T>::PassWeak(const PassWeak& o)
+ : m_impl(o.leakImpl())
+{
+}
+
+template<typename T> template<typename U> inline PassWeak<T>::PassWeak(const PassWeak<U>& o)
+ : m_impl(o.leakImpl())
+{
+}
+
+template<typename T> inline PassWeak<T>::~PassWeak()
+{
+ if (!m_impl)
+ return;
+ WeakSet::deallocate(m_impl);
+}
+
+template<typename T> inline bool PassWeak<T>::operator!() const
+{
+ return !m_impl || m_impl->state() != WeakImpl::Live || !m_impl->jsValue();
+}
+
+template<typename T> inline PassWeak<T>::operator UnspecifiedBoolType*() const
+{
+ return reinterpret_cast<UnspecifiedBoolType*>(!!*this);
+}
+
+template<typename T> inline PassWeak<T>::PassWeak(WeakImpl* impl)
+ : m_impl(impl)
+{
+}
+
+template<typename T> inline WeakImpl* PassWeak<T>::leakImpl() const
{
- HandleSlot slot = this->slot();
- const_cast<PassWeak<T>*>(this)->setSlot(0);
- return slot;
+ WeakImpl* tmp = 0;
+ std::swap(tmp, const_cast<WeakImpl*&>(m_impl));
+ return tmp;
}
-template<typename T> PassWeak<T> adoptWeak(HandleSlot slot)
+template<typename T> PassWeak<T> inline adoptWeak(WeakImpl* impl)
{
- return PassWeak<T>(slot);
+ return PassWeak<T>(impl);
}
template<typename T, typename U> inline bool operator==(const PassWeak<T>& a, const PassWeak<U>& b)
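PassWeak now carries a raw WeakImpl* and transfers ownership the way WTF::PassRefPtr does: copying steals the pointer via leakImpl(), and adoptWeak() wraps a pointer the caller already owns. A stand-alone sketch of the pattern with stand-in types (not the JSC classes):

#include <algorithm>
#include <cstdio>

struct Impl { int payload; };

class PassHandle {
public:
    explicit PassHandle(Impl* impl) : m_impl(impl) { }
    PassHandle(const PassHandle& o) : m_impl(o.leakImpl()) { } // copying steals, like PassWeak
    ~PassHandle() { delete m_impl; } // only the last holder still owns anything

    Impl* leakImpl() const
    {
        Impl* tmp = 0;
        std::swap(tmp, const_cast<Impl*&>(m_impl)); // same swap idiom as PassWeak::leakImpl()
        return tmp;
    }

private:
    Impl* m_impl;
};

int main()
{
    PassHandle a(new Impl());
    PassHandle b(a);                        // ownership moves to b; a now holds 0
    std::printf("%d\n", a.leakImpl() == 0); // prints 1
    delete b.leakImpl();                    // take ownership back explicitly
    return 0;
}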
diff --git a/Source/JavaScriptCore/heap/SlotVisitor.h b/Source/JavaScriptCore/heap/SlotVisitor.h
index 6584db703..01eb219fc 100644
--- a/Source/JavaScriptCore/heap/SlotVisitor.h
+++ b/Source/JavaScriptCore/heap/SlotVisitor.h
@@ -62,7 +62,6 @@ public:
void finalizeUnconditionalFinalizers();
void startCopying();
- void copy(void**, size_t);
void copyAndAppend(void**, size_t, JSValue*, unsigned);
void doneCopying();
diff --git a/Source/JavaScriptCore/heap/Strong.h b/Source/JavaScriptCore/heap/Strong.h
index d2f2a2278..7fafaeab5 100644
--- a/Source/JavaScriptCore/heap/Strong.h
+++ b/Source/JavaScriptCore/heap/Strong.h
@@ -28,7 +28,7 @@
#include <wtf/Assertions.h>
#include "Handle.h"
-#include "HandleHeap.h"
+#include "HandleSet.h"
namespace JSC {
@@ -56,7 +56,7 @@ public:
{
if (!other.slot())
return;
- setSlot(HandleHeap::heapFor(other.slot())->allocate());
+ setSlot(HandleSet::heapFor(other.slot())->allocate());
set(other.get());
}
@@ -65,7 +65,7 @@ public:
{
if (!other.slot())
return;
- setSlot(HandleHeap::heapFor(other.slot())->allocate());
+ setSlot(HandleSet::heapFor(other.slot())->allocate());
set(other.get());
}
@@ -81,11 +81,19 @@ public:
clear();
}
+ bool operator!() const { return !slot() || !*slot(); }
+
+ // This conversion operator allows implicit conversion to bool but not to other integer types.
+ typedef JSValue (HandleBase::*UnspecifiedBoolType);
+ operator UnspecifiedBoolType*() const { return !!*this ? reinterpret_cast<UnspecifiedBoolType*>(1) : 0; }
+
void swap(Strong& other)
{
Handle<T>::swap(other);
}
+ ExternalType get() const { return HandleTypes<T>::getFromSlot(this->slot()); }
+
void set(JSGlobalData&, ExternalType);
template <typename U> Strong& operator=(const Strong<U>& other)
@@ -95,7 +103,7 @@ public:
return *this;
}
- set(*HandleHeap::heapFor(other.slot())->globalData(), other.get());
+ set(*HandleSet::heapFor(other.slot())->globalData(), other.get());
return *this;
}
@@ -106,7 +114,7 @@ public:
return *this;
}
- set(*HandleHeap::heapFor(other.slot())->globalData(), other.get());
+ set(*HandleSet::heapFor(other.slot())->globalData(), other.get());
return *this;
}
@@ -114,7 +122,7 @@ public:
{
if (!slot())
return;
- HandleHeap::heapFor(slot())->deallocate(slot());
+ HandleSet::heapFor(slot())->deallocate(slot());
setSlot(0);
}
@@ -125,7 +133,7 @@ private:
{
ASSERT(slot());
JSValue value = HandleTypes<T>::toJSValue(externalType);
- HandleHeap::heapFor(slot())->writeBarrier(slot(), value);
+ HandleSet::heapFor(slot())->writeBarrier(slot(), value);
*slot() = value;
}
};
diff --git a/Source/JavaScriptCore/heap/StrongInlines.h b/Source/JavaScriptCore/heap/StrongInlines.h
index 46049096a..2308bf6f6 100644
--- a/Source/JavaScriptCore/heap/StrongInlines.h
+++ b/Source/JavaScriptCore/heap/StrongInlines.h
@@ -26,18 +26,20 @@
#ifndef StrongInlines_h
#define StrongInlines_h
+#include "JSGlobalData.h"
+
namespace JSC {
template <typename T>
inline Strong<T>::Strong(JSGlobalData& globalData, ExternalType value)
- : Handle<T>(globalData.heap.handleHeap()->allocate())
+ : Handle<T>(globalData.heap.handleSet()->allocate())
{
set(value);
}
template <typename T>
inline Strong<T>::Strong(JSGlobalData& globalData, Handle<T> handle)
- : Handle<T>(globalData.heap.handleHeap()->allocate())
+ : Handle<T>(globalData.heap.handleSet()->allocate())
{
set(handle.get());
}
@@ -46,7 +48,7 @@ template <typename T>
inline void Strong<T>::set(JSGlobalData& globalData, ExternalType value)
{
if (!slot())
- setSlot(globalData.heap.handleHeap()->allocate());
+ setSlot(globalData.heap.handleSet()->allocate());
set(value);
}
diff --git a/Source/JavaScriptCore/heap/Weak.h b/Source/JavaScriptCore/heap/Weak.h
index 8291e4440..0938249b8 100644
--- a/Source/JavaScriptCore/heap/Weak.h
+++ b/Source/JavaScriptCore/heap/Weak.h
@@ -27,109 +27,129 @@
#define Weak_h
#include <wtf/Assertions.h>
-#include "Handle.h"
-#include "HandleHeap.h"
-#include "JSGlobalData.h"
#include "PassWeak.h"
+#include "WeakSetInlines.h"
namespace JSC {
-// A weakly referenced handle that becomes 0 when the value it points to is garbage collected.
-template <typename T> class Weak : public Handle<T> {
+template<typename T> class Weak : public WeakImplAccessor<Weak<T>, T> {
WTF_MAKE_NONCOPYABLE(Weak);
-
- using Handle<T>::slot;
- using Handle<T>::setSlot;
-
public:
- typedef typename Handle<T>::ExternalType ExternalType;
-
- Weak()
- : Handle<T>()
- {
- }
-
- Weak(std::nullptr_t)
- : Handle<T>()
- {
- }
-
- Weak(JSGlobalData& globalData, ExternalType externalType = ExternalType(), WeakHandleOwner* weakOwner = 0, void* context = 0)
- : Handle<T>(globalData.heap.handleHeap()->allocate())
- {
- HandleHeap::heapFor(slot())->makeWeak(slot(), weakOwner, context);
- JSValue value = HandleTypes<T>::toJSValue(externalType);
- HandleHeap::heapFor(slot())->writeBarrier(slot(), value);
- *slot() = value;
- }
-
- enum AdoptTag { Adopt };
- template<typename U> Weak(AdoptTag, Handle<U> handle)
- : Handle<T>(handle.slot())
- {
- validateCell(get());
- }
+ friend class WeakImplAccessor<Weak<T>, T>;
+ typedef typename WeakImplAccessor<Weak<T>, T>::GetType GetType;
+
+ Weak();
+ Weak(std::nullptr_t);
+ Weak(GetType, WeakHandleOwner* = 0, void* context = 0);
enum HashTableDeletedValueTag { HashTableDeletedValue };
- bool isHashTableDeletedValue() const { return slot() == hashTableDeletedValue(); }
- Weak(HashTableDeletedValueTag)
- : Handle<T>(hashTableDeletedValue())
- {
- }
-
- template<typename U> Weak(const PassWeak<U>& other)
- : Handle<T>(other.leakHandle())
- {
- }
-
- ~Weak()
- {
- clear();
- }
-
- void swap(Weak& other)
- {
- Handle<T>::swap(other);
- }
+ bool isHashTableDeletedValue() const;
+ Weak(HashTableDeletedValueTag);
- Weak& operator=(const PassWeak<T>&);
+ template<typename U> Weak(const PassWeak<U>&);
- ExternalType get() const { return HandleTypes<T>::getFromSlot(slot()); }
-
- PassWeak<T> release() { PassWeak<T> tmp = adoptWeak<T>(slot()); setSlot(0); return tmp; }
-
- void clear()
- {
- if (!slot())
- return;
- HandleHeap::heapFor(slot())->deallocate(slot());
- setSlot(0);
- }
+ ~Weak();
+
+ void swap(Weak&);
+ Weak& operator=(const PassWeak<T>&);
- HandleSlot leakHandle()
- {
- ASSERT(HandleHeap::heapFor(slot())->hasFinalizer(slot()));
- HandleSlot result = slot();
- setSlot(0);
- return result;
- }
+ bool operator!() const;
+ // This conversion operator allows implicit conversion to bool but not to other integer types.
+ typedef JSValue (HandleBase::*UnspecifiedBoolType);
+ operator UnspecifiedBoolType*() const;
+
+ PassWeak<T> release();
+ void clear();
+
private:
- static HandleSlot hashTableDeletedValue() { return reinterpret_cast<HandleSlot>(-1); }
+ static WeakImpl* hashTableDeletedValue();
+
+ WeakImpl* m_impl;
};
+template<typename T> inline Weak<T>::Weak()
+ : m_impl(0)
+{
+}
+
+template<typename T> inline Weak<T>::Weak(std::nullptr_t)
+ : m_impl(0)
+{
+}
+
+template<typename T> inline Weak<T>::Weak(typename Weak<T>::GetType getType, WeakHandleOwner* weakOwner, void* context)
+ : m_impl(getType ? WeakSet::allocate(getType, weakOwner, context) : 0)
+{
+}
+
+template<typename T> inline bool Weak<T>::isHashTableDeletedValue() const
+{
+ return m_impl == hashTableDeletedValue();
+}
+
+template<typename T> inline Weak<T>::Weak(typename Weak<T>::HashTableDeletedValueTag)
+ : m_impl(hashTableDeletedValue())
+{
+}
+
+template<typename T> template<typename U> inline Weak<T>::Weak(const PassWeak<U>& other)
+ : m_impl(other.leakImpl())
+{
+}
+
+template<typename T> inline Weak<T>::~Weak()
+{
+ clear();
+}
+
template<class T> inline void swap(Weak<T>& a, Weak<T>& b)
{
a.swap(b);
}
+template<typename T> inline void Weak<T>::swap(Weak& other)
+{
+ std::swap(m_impl, other.m_impl);
+}
+
template<typename T> inline Weak<T>& Weak<T>::operator=(const PassWeak<T>& o)
{
clear();
- setSlot(o.leakHandle());
+ m_impl = o.leakImpl();
return *this;
}
+template<typename T> inline bool Weak<T>::operator!() const
+{
+ return !m_impl || !m_impl->jsValue() || m_impl->state() != WeakImpl::Live;
+}
+
+template<typename T> inline Weak<T>::operator UnspecifiedBoolType*() const
+{
+ return reinterpret_cast<UnspecifiedBoolType*>(!!*this);
+}
+
+template<typename T> inline PassWeak<T> Weak<T>::release()
+{
+ PassWeak<T> tmp = adoptWeak<T>(m_impl);
+ m_impl = 0;
+ return tmp;
+}
+
+template<typename T> inline void Weak<T>::clear()
+{
+ if (!m_impl)
+ return;
+ WeakSet::deallocate(m_impl);
+ m_impl = 0;
+}
+
+template<typename T> inline WeakImpl* Weak<T>::hashTableDeletedValue()
+{
+ return reinterpret_cast<WeakImpl*>(-1);
+}
+
} // namespace JSC
namespace WTF {
@@ -151,7 +171,7 @@ template<typename T> struct HashTraits<JSC::Weak<T> > : SimpleClassHashTraits<JSC::Weak<T> > {
static PassOutType passOut(StorageType& value) { return value.release(); }
static PassOutType passOut(EmptyValueType) { return PassOutType(); }
- typedef typename StorageType::ExternalType PeekType;
+ typedef typename StorageType::GetType PeekType;
static PeekType peek(const StorageType& value) { return value.get(); }
static PeekType peek(EmptyValueType) { return PeekType(); }
};
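Weak<T> stores a WeakImpl* directly, so its hash-table deleted value can be an impossible pointer (all bits set) rather than a specially marked slot, costing no extra storage when Weak is used as a hash-table key. A small sketch of that sentinel trick with a stand-in impl type:

#include <cassert>

struct ImplStandIn { int payload; };

class WeakLikeHandle {
public:
    WeakLikeHandle() : m_impl(0) { }

    enum HashTableDeletedValueTag { HashTableDeletedValue };
    WeakLikeHandle(HashTableDeletedValueTag) : m_impl(hashTableDeletedValue()) { }

    bool isHashTableDeletedValue() const { return m_impl == hashTableDeletedValue(); }

private:
    static ImplStandIn* hashTableDeletedValue()
    {
        return reinterpret_cast<ImplStandIn*>(-1); // never a real allocation address
    }

    ImplStandIn* m_impl;
};

int main()
{
    WeakLikeHandle empty;
    WeakLikeHandle deleted(WeakLikeHandle::HashTableDeletedValue);
    assert(!empty.isHashTableDeletedValue());
    assert(deleted.isHashTableDeletedValue());
    return 0;
}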
diff --git a/Source/JavaScriptCore/heap/WeakBlock.cpp b/Source/JavaScriptCore/heap/WeakBlock.cpp
new file mode 100644
index 000000000..f307e111e
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakBlock.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "WeakBlock.h"
+
+#include "Heap.h"
+#include "HeapRootVisitor.h"
+#include "JSObject.h"
+#include "ScopeChain.h"
+#include "Structure.h"
+
+namespace JSC {
+
+WeakBlock* WeakBlock::create()
+{
+ PageAllocation allocation = PageAllocation::allocate(blockSize, OSAllocator::JSGCHeapPages);
+ if (!static_cast<bool>(allocation))
+ CRASH();
+ return new (NotNull, allocation.base()) WeakBlock(allocation);
+}
+
+void WeakBlock::destroy(WeakBlock* block)
+{
+ block->m_allocation.deallocate();
+}
+
+WeakBlock::WeakBlock(PageAllocation& allocation)
+ : m_allocation(allocation)
+{
+ for (size_t i = 0; i < weakImplCount(); ++i) {
+ WeakImpl* weakImpl = &weakImpls()[i];
+ new (NotNull, weakImpl) WeakImpl;
+ addToFreeList(&m_sweepResult.freeList, weakImpl);
+ }
+
+ ASSERT(!m_sweepResult.isNull() && m_sweepResult.blockIsFree);
+}
+
+void WeakBlock::finalizeAll()
+{
+ for (size_t i = 0; i < weakImplCount(); ++i) {
+ WeakImpl* weakImpl = &weakImpls()[i];
+ if (weakImpl->state() >= WeakImpl::Finalized)
+ continue;
+ weakImpl->setState(WeakImpl::Dead);
+ finalize(weakImpl);
+ }
+}
+
+void WeakBlock::sweep()
+{
+ if (!m_sweepResult.isNull())
+ return;
+
+ SweepResult sweepResult;
+ for (size_t i = 0; i < weakImplCount(); ++i) {
+ WeakImpl* weakImpl = &weakImpls()[i];
+ if (weakImpl->state() == WeakImpl::Dead)
+ finalize(weakImpl);
+ if (weakImpl->state() == WeakImpl::Deallocated)
+ addToFreeList(&sweepResult.freeList, weakImpl);
+ else
+ sweepResult.blockIsFree = false;
+ }
+
+ m_sweepResult = sweepResult;
+ ASSERT(!m_sweepResult.isNull());
+}
+
+void WeakBlock::visitLiveWeakImpls(HeapRootVisitor& heapRootVisitor)
+{
+ // If a block is completely empty, a visit won't have any effect.
+ if (!m_sweepResult.isNull() && m_sweepResult.blockIsFree)
+ return;
+
+ SlotVisitor& visitor = heapRootVisitor.visitor();
+
+ for (size_t i = 0; i < weakImplCount(); ++i) {
+ WeakImpl* weakImpl = &weakImpls()[i];
+ if (weakImpl->state() != WeakImpl::Live)
+ continue;
+
+ const JSValue& jsValue = weakImpl->jsValue();
+ if (Heap::isMarked(jsValue.asCell()))
+ continue;
+
+ WeakHandleOwner* weakHandleOwner = weakImpl->weakHandleOwner();
+ if (!weakHandleOwner)
+ continue;
+
+ if (!weakHandleOwner->isReachableFromOpaqueRoots(Handle<Unknown>::wrapSlot(&const_cast<JSValue&>(jsValue)), weakImpl->context(), visitor))
+ continue;
+
+ heapRootVisitor.visit(&const_cast<JSValue&>(jsValue));
+ }
+}
+
+void WeakBlock::visitDeadWeakImpls(HeapRootVisitor&)
+{
+ // If a block is completely empty, a visit won't have any effect.
+ if (!m_sweepResult.isNull() && m_sweepResult.blockIsFree)
+ return;
+
+ for (size_t i = 0; i < weakImplCount(); ++i) {
+ WeakImpl* weakImpl = &weakImpls()[i];
+ if (weakImpl->state() > WeakImpl::Dead)
+ continue;
+
+ if (Heap::isMarked(weakImpl->jsValue().asCell()))
+ continue;
+
+ weakImpl->setState(WeakImpl::Dead);
+ }
+}
+
+} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/WeakBlock.h b/Source/JavaScriptCore/heap/WeakBlock.h
new file mode 100644
index 000000000..9e546ea32
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakBlock.h
@@ -0,0 +1,156 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WeakBlock_h
+#define WeakBlock_h
+
+#include "HeapBlock.h"
+#include "WeakHandleOwner.h"
+#include "WeakImpl.h"
+#include <wtf/DoublyLinkedList.h>
+#include <wtf/PageAllocation.h>
+#include <wtf/StdLibExtras.h>
+
+namespace JSC {
+
+class HeapRootVisitor;
+class JSValue;
+class WeakHandleOwner;
+
+class WeakBlock : public DoublyLinkedListNode<WeakBlock> {
+public:
+ friend class WTF::DoublyLinkedListNode<WeakBlock>;
+ static const size_t blockSize = 4 * KB;
+
+ struct FreeCell {
+ FreeCell* next;
+ };
+
+ struct SweepResult {
+ SweepResult();
+ bool isNull() const;
+
+ bool blockIsFree;
+ FreeCell* freeList;
+ };
+
+ static WeakBlock* create();
+ static void destroy(WeakBlock*);
+
+ static WeakImpl* asWeakImpl(FreeCell*);
+
+ void sweep();
+ const SweepResult& sweepResult();
+ SweepResult takeSweepResult();
+
+ void visitLiveWeakImpls(HeapRootVisitor&);
+ void visitDeadWeakImpls(HeapRootVisitor&);
+
+ void finalizeAll();
+
+private:
+ static FreeCell* asFreeCell(WeakImpl*);
+
+ WeakBlock(PageAllocation&);
+ WeakImpl* firstWeakImpl();
+ void finalize(WeakImpl*);
+ WeakImpl* weakImpls();
+ size_t weakImplCount();
+ void addToFreeList(FreeCell**, WeakImpl*);
+
+ PageAllocation m_allocation;
+ WeakBlock* m_prev;
+ WeakBlock* m_next;
+ SweepResult m_sweepResult;
+};
+
+inline WeakBlock::SweepResult::SweepResult()
+ : blockIsFree(true)
+ , freeList(0)
+{
+ ASSERT(isNull());
+}
+
+inline bool WeakBlock::SweepResult::isNull() const
+{
+ return blockIsFree && !freeList; // This state is impossible, so we can use it to mean null.
+}
+
+inline WeakImpl* WeakBlock::asWeakImpl(FreeCell* freeCell)
+{
+ return reinterpret_cast<WeakImpl*>(freeCell);
+}
+
+inline WeakBlock::SweepResult WeakBlock::takeSweepResult()
+{
+ SweepResult tmp;
+ std::swap(tmp, m_sweepResult);
+ ASSERT(m_sweepResult.isNull());
+ return tmp;
+}
+
+inline const WeakBlock::SweepResult& WeakBlock::sweepResult()
+{
+ return m_sweepResult;
+}
+
+inline WeakBlock::FreeCell* WeakBlock::asFreeCell(WeakImpl* weakImpl)
+{
+ return reinterpret_cast<FreeCell*>(weakImpl);
+}
+
+inline void WeakBlock::finalize(WeakImpl* weakImpl)
+{
+ ASSERT(weakImpl->state() == WeakImpl::Dead);
+ weakImpl->setState(WeakImpl::Finalized);
+ WeakHandleOwner* weakHandleOwner = weakImpl->weakHandleOwner();
+ if (!weakHandleOwner)
+ return;
+ weakHandleOwner->finalize(Handle<Unknown>::wrapSlot(&const_cast<JSValue&>(weakImpl->jsValue())), weakImpl->context());
+}
+
+inline WeakImpl* WeakBlock::weakImpls()
+{
+ return reinterpret_cast<WeakImpl*>(this) + ((sizeof(WeakBlock) + sizeof(WeakImpl) - 1) / sizeof(WeakImpl));
+}
+
+inline size_t WeakBlock::weakImplCount()
+{
+ return (blockSize / sizeof(WeakImpl)) - ((sizeof(WeakBlock) + sizeof(WeakImpl) - 1) / sizeof(WeakImpl));
+}
+
+inline void WeakBlock::addToFreeList(FreeCell** freeList, WeakImpl* weakImpl)
+{
+ ASSERT(weakImpl->state() == WeakImpl::Deallocated);
+ FreeCell* freeCell = asFreeCell(weakImpl);
+ ASSERT(!*freeList || ((char*)*freeList > (char*)this && (char*)*freeList < (char*)this + blockSize));
+ ASSERT((char*)freeCell > (char*)this && (char*)freeCell < (char*)this + blockSize);
+ freeCell->next = *freeList;
+ *freeList = freeCell;
+}
+
+} // namespace JSC
+
+#endif // WeakBlock_h
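weakImpls() and weakImplCount() above place the WeakImpl array immediately after the WeakBlock header, rounded up to a WeakImpl boundary, and fill the rest of the fixed 4 KB page. A worked example of that arithmetic with illustrative (not actual) sizeof values:

#include <cstddef>
#include <cstdio>

int main()
{
    const size_t blockSize = 4 * 1024;
    const size_t headerSize = 48; // stands in for sizeof(WeakBlock); the real value differs
    const size_t implSize = 16;   // stands in for sizeof(WeakImpl); the real value differs

    size_t headerInImpls = (headerSize + implSize - 1) / implSize; // round header up to impl slots
    size_t implCount = blockSize / implSize - headerInImpls;

    std::printf("first WeakImpl at offset %zu, %zu impls per block\n",
                headerInImpls * implSize, implCount);
    return 0;
}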
diff --git a/Source/JavaScriptCore/heap/WeakHandleOwner.cpp b/Source/JavaScriptCore/heap/WeakHandleOwner.cpp
new file mode 100644
index 000000000..67e1774df
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakHandleOwner.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "WeakHandleOwner.h"
+
+namespace JSC {
+
+class SlotVisitor;
+template<typename T> class Handle;
+
+WeakHandleOwner::~WeakHandleOwner()
+{
+}
+
+bool WeakHandleOwner::isReachableFromOpaqueRoots(Handle<Unknown>, void*, SlotVisitor&)
+{
+ return false;
+}
+
+void WeakHandleOwner::finalize(Handle<Unknown>, void*)
+{
+}
+
+} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/WeakHandleOwner.h b/Source/JavaScriptCore/heap/WeakHandleOwner.h
new file mode 100644
index 000000000..6304dd20b
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakHandleOwner.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WeakHandleOwner_h
+#define WeakHandleOwner_h
+
+#include "Handle.h"
+
+namespace JSC {
+
+class SlotVisitor;
+
+class JS_EXPORT_PRIVATE WeakHandleOwner {
+public:
+ virtual ~WeakHandleOwner();
+ virtual bool isReachableFromOpaqueRoots(Handle<Unknown>, void* context, SlotVisitor&);
+ virtual void finalize(Handle<Unknown>, void* context);
+};
+
+} // namespace JSC
+
+#endif // WeakHandleOwner_h
diff --git a/Source/JavaScriptCore/heap/WeakImpl.h b/Source/JavaScriptCore/heap/WeakImpl.h
new file mode 100644
index 000000000..9924923f9
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakImpl.h
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WeakImpl_h
+#define WeakImpl_h
+
+#include "JSValue.h"
+
+namespace JSC {
+
+class WeakHandleOwner;
+
+class WeakImpl {
+public:
+ enum State {
+ Live = 0x0,
+ Dead = 0x1,
+ Finalized = 0x2,
+ Deallocated = 0x3
+ };
+
+ enum {
+ StateMask = 0x3
+ };
+
+ WeakImpl();
+ WeakImpl(JSValue, WeakHandleOwner*, void* context);
+
+ State state();
+ void setState(State);
+
+ const JSValue& jsValue();
+ WeakHandleOwner* weakHandleOwner();
+ void* context();
+
+ static WeakImpl* asWeakImpl(JSValue*);
+
+private:
+ const JSValue m_jsValue;
+ WeakHandleOwner* m_weakHandleOwner;
+ void* m_context;
+};
+
+inline WeakImpl::WeakImpl()
+ : m_weakHandleOwner(0)
+ , m_context(0)
+{
+ setState(Deallocated);
+}
+
+inline WeakImpl::WeakImpl(JSValue jsValue, WeakHandleOwner* weakHandleOwner, void* context)
+ : m_jsValue(jsValue)
+ , m_weakHandleOwner(weakHandleOwner)
+ , m_context(context)
+{
+ ASSERT(state() == Live);
+ ASSERT(m_jsValue && m_jsValue.isCell());
+}
+
+inline WeakImpl::State WeakImpl::state()
+{
+ return static_cast<State>(reinterpret_cast<uintptr_t>(m_weakHandleOwner) & StateMask);
+}
+
+inline void WeakImpl::setState(WeakImpl::State state)
+{
+ ASSERT(state >= this->state());
+ m_weakHandleOwner = reinterpret_cast<WeakHandleOwner*>((reinterpret_cast<uintptr_t>(m_weakHandleOwner) & ~StateMask) | state);
+}
+
+inline const JSValue& WeakImpl::jsValue()
+{
+ return m_jsValue;
+}
+
+inline WeakHandleOwner* WeakImpl::weakHandleOwner()
+{
+ return reinterpret_cast<WeakHandleOwner*>((reinterpret_cast<uintptr_t>(m_weakHandleOwner) & ~StateMask));
+}
+
+inline void* WeakImpl::context()
+{
+ return m_context;
+}
+
+inline WeakImpl* WeakImpl::asWeakImpl(JSValue* slot)
+{
+ return reinterpret_cast<WeakImpl*>(reinterpret_cast<char*>(slot) + OBJECT_OFFSETOF(WeakImpl, m_jsValue));
+}
+
+} // namespace JSC
+
+#endif // WeakImpl_h
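WeakImpl keeps its Live/Dead/Finalized/Deallocated state in the low two bits of m_weakHandleOwner, relying on the owner pointer being at least 4-byte aligned so those bits are otherwise always zero. A stand-alone sketch of the tagging with stand-in types:

#include <cassert>
#include <cstdint>

struct OwnerStandIn { virtual ~OwnerStandIn() { } }; // any type aligned to 4+ bytes works

enum State { Live = 0x0, Dead = 0x1, Finalized = 0x2, Deallocated = 0x3 };
const uintptr_t StateMask = 0x3;

uintptr_t withState(uintptr_t taggedOwner, State state)
{
    return (taggedOwner & ~StateMask) | state;
}

State stateOf(uintptr_t taggedOwner)
{
    return static_cast<State>(taggedOwner & StateMask);
}

OwnerStandIn* ownerOf(uintptr_t taggedOwner)
{
    return reinterpret_cast<OwnerStandIn*>(taggedOwner & ~StateMask);
}

int main()
{
    OwnerStandIn owner;
    uintptr_t tagged = reinterpret_cast<uintptr_t>(&owner); // low bits 0, i.e. state == Live
    assert(stateOf(tagged) == Live);
    tagged = withState(tagged, Dead);
    assert(stateOf(tagged) == Dead && ownerOf(tagged) == &owner);
    return 0;
}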
diff --git a/Source/JavaScriptCore/heap/WeakSet.cpp b/Source/JavaScriptCore/heap/WeakSet.cpp
new file mode 100644
index 000000000..d9c773cef
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakSet.cpp
@@ -0,0 +1,132 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "WeakSet.h"
+
+#include "Heap.h"
+
+namespace JSC {
+
+WeakSet::~WeakSet()
+{
+ WeakBlock* next = 0;
+ for (WeakBlock* block = m_blocks.head(); block; block = next) {
+ next = block->next();
+ WeakBlock::destroy(block);
+ }
+ m_blocks.clear();
+}
+
+void WeakSet::finalizeAll()
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next())
+ block->finalizeAll();
+}
+
+void WeakSet::visitLiveWeakImpls(HeapRootVisitor& visitor)
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next())
+ block->visitLiveWeakImpls(visitor);
+}
+
+void WeakSet::visitDeadWeakImpls(HeapRootVisitor& visitor)
+{
+ for (WeakBlock* block = m_blocks.head(); block; block = block->next())
+ block->visitDeadWeakImpls(visitor);
+}
+
+void WeakSet::sweep()
+{
+ WeakBlock* next;
+ for (WeakBlock* block = m_blocks.head(); block; block = next) {
+ next = block->next();
+
+ // If the last sweep found the block completely free, sweeping it again won't have any effect.
+ if (!block->sweepResult().isNull() && block->sweepResult().blockIsFree)
+ continue;
+
+ block->takeSweepResult(); // Force a new sweep by discarding the last sweep.
+ block->sweep();
+ }
+}
+
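+// Destroys any block whose most recent sweep found it completely free.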
+void WeakSet::shrink()
+{
+ WeakBlock* next;
+ for (WeakBlock* block = m_blocks.head(); block; block = next) {
+ next = block->next();
+
+ if (!block->sweepResult().isNull() && block->sweepResult().blockIsFree)
+ removeAllocator(block);
+ }
+}
+
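+// Discards the cached free cell and restarts the free-cell search from the first block.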
+void WeakSet::resetAllocator()
+{
+ m_allocator = 0;
+ m_nextAllocator = m_blocks.head();
+}
+
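+// Slow path for allocate(): sweep existing blocks in search of a free cell, and
+// fall back to appending a fresh block when none turns up.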
+WeakBlock::FreeCell* WeakSet::findAllocator()
+{
+ if (WeakBlock::FreeCell* allocator = tryFindAllocator())
+ return allocator;
+
+ return addAllocator();
+}
+
+WeakBlock::FreeCell* WeakSet::tryFindAllocator()
+{
+ while (m_nextAllocator) {
+ WeakBlock* block = m_nextAllocator;
+ m_nextAllocator = m_nextAllocator->next();
+
+ block->sweep();
+ WeakBlock::SweepResult sweepResult = block->takeSweepResult();
+ if (sweepResult.freeList)
+ return sweepResult.freeList;
+ }
+
+ return 0;
+}
+
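+// A newly created block starts with every cell free, so the ASSERT below expects
+// its initial sweep result to carry a non-empty free list.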
+WeakBlock::FreeCell* WeakSet::addAllocator()
+{
+ WeakBlock* block = WeakBlock::create();
+ m_heap->didAllocate(WeakBlock::blockSize);
+ m_blocks.append(block);
+ WeakBlock::SweepResult sweepResult = block->takeSweepResult();
+ ASSERT(!sweepResult.isNull() && sweepResult.freeList);
+ return sweepResult.freeList;
+}
+
+void WeakSet::removeAllocator(WeakBlock* block)
+{
+ m_blocks.remove(block);
+ WeakBlock::destroy(block);
+}
+
+} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/WeakSet.h b/Source/JavaScriptCore/heap/WeakSet.h
new file mode 100644
index 000000000..0a683bd5f
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakSet.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WeakSet_h
+#define WeakSet_h
+
+#include "WeakBlock.h"
+
+namespace JSC {
+
+class Heap;
+class WeakImpl;
+
+class WeakSet {
+public:
+ WeakSet(Heap*);
+ void finalizeAll();
+ ~WeakSet();
+
+ static WeakImpl* allocate(JSValue, WeakHandleOwner* = 0, void* context = 0);
+ static void deallocate(WeakImpl*);
+
+ void visitLiveWeakImpls(HeapRootVisitor&);
+ void visitDeadWeakImpls(HeapRootVisitor&);
+
+ void sweep();
+ void resetAllocator();
+
+ void shrink();
+
+private:
+ JS_EXPORT_PRIVATE WeakBlock::FreeCell* findAllocator();
+ WeakBlock::FreeCell* tryFindAllocator();
+ WeakBlock::FreeCell* addAllocator();
+ void removeAllocator(WeakBlock*);
+
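+ // m_allocator caches the head of the current free-cell list; m_nextAllocator is
+ // the next block to sweep once that list runs dry.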
+ WeakBlock::FreeCell* m_allocator;
+ WeakBlock* m_nextAllocator;
+ DoublyLinkedList<WeakBlock> m_blocks;
+ Heap* m_heap;
+};
+
+inline WeakSet::WeakSet(Heap* heap)
+ : m_allocator(0)
+ , m_nextAllocator(0)
+ , m_heap(heap)
+{
+}
+
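+// Deallocation only marks the cell Deallocated; its storage is reclaimed the next
+// time the owning block is swept.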
+inline void WeakSet::deallocate(WeakImpl* weakImpl)
+{
+ weakImpl->setState(WeakImpl::Deallocated);
+}
+
+} // namespace JSC
+
+#endif // WeakSet_h
diff --git a/Source/JavaScriptCore/heap/WeakSetInlines.h b/Source/JavaScriptCore/heap/WeakSetInlines.h
new file mode 100644
index 000000000..0515904fc
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakSetInlines.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2012 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WeakSetInlines_h
+#define WeakSetInlines_h
+
+#include "WeakSet.h"
+
+namespace JSC {
+
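+// Pops a free cell from the owning heap's WeakSet, taking the slow path through
+// findAllocator() when the cached free list is empty, then placement-news the
+// WeakImpl into that cell.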
+inline WeakImpl* WeakSet::allocate(JSValue jsValue, WeakHandleOwner* weakHandleOwner, void* context)
+{
+ WeakSet& weakSet = *Heap::heap(jsValue.asCell())->weakSet();
+ WeakBlock::FreeCell* allocator = weakSet.m_allocator;
+ if (UNLIKELY(!allocator))
+ allocator = weakSet.findAllocator();
+ weakSet.m_allocator = allocator->next;
+
+ WeakImpl* weakImpl = WeakBlock::asWeakImpl(allocator);
+ return new (NotNull, weakImpl) WeakImpl(jsValue, weakHandleOwner, context);
+}
+
+} // namespace JSC
+
+#endif // WeakSetInlines_h