author     Allan Sandfeld Jensen <allan.jensen@digia.com>    2013-09-13 12:51:20 +0200
committer  The Qt Project <gerrit-noreply@qt-project.org>    2013-09-19 20:50:05 +0200
commit     d441d6f39bb846989d95bcf5caf387b42414718d (patch)
tree       e367e64a75991c554930278175d403c072de6bb8 /Source/JavaScriptCore/heap
parent     0060b2994c07842f4c59de64b5e3e430525c4b90 (diff)
download   qtwebkit-d441d6f39bb846989d95bcf5caf387b42414718d.tar.gz

Import Qt5x2 branch of QtWebkit for Qt 5.2

Importing a new snapshot of webkit.

Change-Id: I2d01ad12cdc8af8cb015387641120a9d7ea5f10c
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@digia.com>
Diffstat (limited to 'Source/JavaScriptCore/heap')
-rw-r--r--  Source/JavaScriptCore/heap/BlockAllocator.cpp | 25
-rw-r--r--  Source/JavaScriptCore/heap/BlockAllocator.h | 151
-rw-r--r--  Source/JavaScriptCore/heap/CardSet.h | 91
-rw-r--r--  Source/JavaScriptCore/heap/CopiedBlock.h | 5
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpace.cpp | 5
-rw-r--r--  Source/JavaScriptCore/heap/CopiedSpaceInlines.h | 4
-rw-r--r--  Source/JavaScriptCore/heap/DFGCodeBlocks.cpp | 12
-rw-r--r--  Source/JavaScriptCore/heap/GCAssertions.h | 9
-rw-r--r--  Source/JavaScriptCore/heap/GCThread.cpp | 4
-rw-r--r--  Source/JavaScriptCore/heap/GCThreadSharedData.cpp | 18
-rw-r--r--  Source/JavaScriptCore/heap/GCThreadSharedData.h | 8
-rw-r--r--  Source/JavaScriptCore/heap/Handle.h | 5
-rw-r--r--  Source/JavaScriptCore/heap/HandleBlock.h | 73
-rw-r--r--  Source/JavaScriptCore/heap/HandleBlockInlines.h | 74
-rw-r--r--  Source/JavaScriptCore/heap/HandleSet.cpp | 36
-rw-r--r--  Source/JavaScriptCore/heap/HandleSet.h | 108
-rw-r--r--  Source/JavaScriptCore/heap/HandleStack.cpp | 2
-rw-r--r--  Source/JavaScriptCore/heap/HandleStack.h | 2
-rw-r--r--  Source/JavaScriptCore/heap/HandleTypes.h | 2
-rw-r--r--  Source/JavaScriptCore/heap/Heap.cpp | 149
-rw-r--r--  Source/JavaScriptCore/heap/Heap.h | 58
-rw-r--r--  Source/JavaScriptCore/heap/HeapBlock.h | 2
-rw-r--r--  Source/JavaScriptCore/heap/HeapStatistics.cpp | 1
-rw-r--r--  Source/JavaScriptCore/heap/HeapStatistics.h | 1
-rw-r--r--  Source/JavaScriptCore/heap/HeapTimer.cpp | 152
-rw-r--r--  Source/JavaScriptCore/heap/HeapTimer.h | 18
-rw-r--r--  Source/JavaScriptCore/heap/IncrementalSweeper.cpp | 24
-rw-r--r--  Source/JavaScriptCore/heap/IncrementalSweeper.h | 4
-rw-r--r--  Source/JavaScriptCore/heap/Local.h | 22
-rw-r--r--  Source/JavaScriptCore/heap/LocalScope.h | 8
-rw-r--r--  Source/JavaScriptCore/heap/MachineStackMarker.cpp | 4
-rw-r--r--  Source/JavaScriptCore/heap/MarkedAllocator.cpp | 4
-rw-r--r--  Source/JavaScriptCore/heap/MarkedAllocator.h | 12
-rw-r--r--  Source/JavaScriptCore/heap/MarkedBlock.cpp | 11
-rw-r--r--  Source/JavaScriptCore/heap/MarkedBlock.h | 135
-rw-r--r--  Source/JavaScriptCore/heap/MarkedSpace.cpp | 31
-rw-r--r--  Source/JavaScriptCore/heap/MarkedSpace.h | 9
-rw-r--r--  Source/JavaScriptCore/heap/PassWeak.h | 74
-rw-r--r--  Source/JavaScriptCore/heap/Region.h | 319
-rw-r--r--  Source/JavaScriptCore/heap/SlotVisitor.cpp | 52
-rw-r--r--  Source/JavaScriptCore/heap/SlotVisitor.h | 3
-rw-r--r--  Source/JavaScriptCore/heap/SlotVisitorInlines.h | 11
-rw-r--r--  Source/JavaScriptCore/heap/Strong.h | 13
-rw-r--r--  Source/JavaScriptCore/heap/StrongInlines.h | 14
-rw-r--r--  Source/JavaScriptCore/heap/SuperRegion.cpp | 82
-rw-r--r--  Source/JavaScriptCore/heap/SuperRegion.h | 58
-rw-r--r--  Source/JavaScriptCore/heap/VTableSpectrum.cpp | 1
-rw-r--r--  Source/JavaScriptCore/heap/Weak.cpp | 41
-rw-r--r--  Source/JavaScriptCore/heap/Weak.h | 190
-rw-r--r--  Source/JavaScriptCore/heap/WeakBlock.cpp | 2
-rw-r--r--  Source/JavaScriptCore/heap/WeakBlock.h | 6
-rw-r--r--  Source/JavaScriptCore/heap/WeakImpl.h | 4
-rw-r--r--  Source/JavaScriptCore/heap/WeakInlines.h | 178
-rw-r--r--  Source/JavaScriptCore/heap/WeakSet.cpp | 2
-rw-r--r--  Source/JavaScriptCore/heap/WeakSet.h | 14
-rw-r--r--  Source/JavaScriptCore/heap/WeakSetInlines.h | 2
56 files changed, 1338 insertions(+), 1007 deletions(-)
diff --git a/Source/JavaScriptCore/heap/BlockAllocator.cpp b/Source/JavaScriptCore/heap/BlockAllocator.cpp
index f94025c1a..aebee6a4e 100644
--- a/Source/JavaScriptCore/heap/BlockAllocator.cpp
+++ b/Source/JavaScriptCore/heap/BlockAllocator.cpp
@@ -35,16 +35,17 @@
namespace JSC {
BlockAllocator::BlockAllocator()
- : m_copiedRegionSet(CopiedBlock::blockSize)
+ : m_superRegion()
+ , m_copiedRegionSet(CopiedBlock::blockSize)
, m_markedRegionSet(MarkedBlock::blockSize)
- , m_weakAndMarkStackRegionSet(WeakBlock::blockSize)
+ , m_fourKBBlockRegionSet(WeakBlock::blockSize)
, m_workListRegionSet(CopyWorkListSegment::blockSize)
, m_numberOfEmptyRegions(0)
, m_isCurrentlyAllocating(false)
, m_blockFreeingThreadShouldQuit(false)
, m_blockFreeingThread(createThread(blockFreeingThreadStartFunc, this, "JavaScriptCore::BlockFree"))
{
- ASSERT(m_blockFreeingThread);
+ RELEASE_ASSERT(m_blockFreeingThread);
m_regionLock.Init();
}
@@ -57,6 +58,16 @@ BlockAllocator::~BlockAllocator()
m_emptyRegionCondition.broadcast();
}
waitForThreadCompletion(m_blockFreeingThread);
+ ASSERT(allRegionSetsAreEmpty());
+ ASSERT(m_emptyRegions.isEmpty());
+}
+
+bool BlockAllocator::allRegionSetsAreEmpty() const
+{
+ return m_copiedRegionSet.isEmpty()
+ && m_markedRegionSet.isEmpty()
+ && m_fourKBBlockRegionSet.isEmpty()
+ && m_workListRegionSet.isEmpty();
}
void BlockAllocator::releaseFreeRegions()
@@ -69,7 +80,7 @@ void BlockAllocator::releaseFreeRegions()
region = 0;
else {
region = m_emptyRegions.removeHead();
- ASSERT(region);
+ RELEASE_ASSERT(region);
m_numberOfEmptyRegions--;
}
}
@@ -77,7 +88,7 @@ void BlockAllocator::releaseFreeRegions()
if (!region)
break;
- delete region;
+ region->destroy();
}
}
@@ -141,7 +152,7 @@ void BlockAllocator::blockFreeingThreadMain()
region = 0;
else {
region = m_emptyRegions.removeHead();
- ASSERT(region);
+ RELEASE_ASSERT(region);
m_numberOfEmptyRegions--;
}
}
@@ -149,7 +160,7 @@ void BlockAllocator::blockFreeingThreadMain()
if (!region)
break;
- delete region;
+ region->destroy();
}
}
}
diff --git a/Source/JavaScriptCore/heap/BlockAllocator.h b/Source/JavaScriptCore/heap/BlockAllocator.h
index 90210c1fa..afd3259fe 100644
--- a/Source/JavaScriptCore/heap/BlockAllocator.h
+++ b/Source/JavaScriptCore/heap/BlockAllocator.h
@@ -27,6 +27,7 @@
#define BlockAllocator_h
#include "HeapBlock.h"
+#include "Region.h"
#include <wtf/DoublyLinkedList.h>
#include <wtf/Forward.h>
#include <wtf/PageAllocationAligned.h>
@@ -38,123 +39,15 @@ namespace JSC {
class BlockAllocator;
class CopiedBlock;
class CopyWorkListSegment;
+class HandleBlock;
+class VM;
class MarkStackSegment;
class MarkedBlock;
-class Region;
class WeakBlock;
// Simple allocator to reduce VM cost by holding onto blocks of memory for
// short periods of time and then freeing them on a secondary thread.
-class DeadBlock : public HeapBlock<DeadBlock> {
-public:
- DeadBlock(Region*);
-};
-
-inline DeadBlock::DeadBlock(Region* region)
- : HeapBlock<DeadBlock>(region)
-{
-}
-
-class Region : public DoublyLinkedListNode<Region> {
- friend CLASS_IF_GCC DoublyLinkedListNode<Region>;
- friend class BlockAllocator;
-public:
- ~Region();
- static Region* create(size_t blockSize);
- static Region* createCustomSize(size_t blockSize, size_t blockAlignment);
- Region* reset(size_t blockSize);
-
- size_t blockSize() const { return m_blockSize; }
- bool isFull() const { return m_blocksInUse == m_totalBlocks; }
- bool isEmpty() const { return !m_blocksInUse; }
- bool isCustomSize() const { return m_isCustomSize; }
-
- DeadBlock* allocate();
- void deallocate(void*);
-
- static const size_t s_regionSize = 64 * KB;
-
-private:
- Region(PageAllocationAligned&, size_t blockSize, size_t totalBlocks);
-
- PageAllocationAligned m_allocation;
- size_t m_totalBlocks;
- size_t m_blocksInUse;
- size_t m_blockSize;
- bool m_isCustomSize;
- Region* m_prev;
- Region* m_next;
- DoublyLinkedList<DeadBlock> m_deadBlocks;
-};
-
-inline Region* Region::create(size_t blockSize)
-{
- ASSERT(blockSize <= s_regionSize);
- ASSERT(!(s_regionSize % blockSize));
- PageAllocationAligned allocation = PageAllocationAligned::allocate(s_regionSize, s_regionSize, OSAllocator::JSGCHeapPages);
- if (!static_cast<bool>(allocation))
- CRASH();
- return new Region(allocation, blockSize, s_regionSize / blockSize);
-}
-
-inline Region* Region::createCustomSize(size_t blockSize, size_t blockAlignment)
-{
- PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, blockAlignment, OSAllocator::JSGCHeapPages);
- if (!static_cast<bool>(allocation))
- CRASH();
- Region* region = new Region(allocation, blockSize, 1);
- region->m_isCustomSize = true;
- return region;
-}
-
-inline Region::Region(PageAllocationAligned& allocation, size_t blockSize, size_t totalBlocks)
- : DoublyLinkedListNode<Region>()
- , m_allocation(allocation)
- , m_totalBlocks(totalBlocks)
- , m_blocksInUse(0)
- , m_blockSize(blockSize)
- , m_isCustomSize(false)
- , m_prev(0)
- , m_next(0)
-{
- ASSERT(allocation);
- char* start = static_cast<char*>(m_allocation.base());
- char* end = start + m_allocation.size();
- for (char* current = start; current < end; current += blockSize)
- m_deadBlocks.append(new (NotNull, current) DeadBlock(this));
-}
-
-inline Region::~Region()
-{
- ASSERT(isEmpty());
- m_allocation.deallocate();
-}
-
-inline Region* Region::reset(size_t blockSize)
-{
- ASSERT(isEmpty());
- PageAllocationAligned allocation = m_allocation;
- return new (NotNull, this) Region(allocation, blockSize, s_regionSize / blockSize);
-}
-
-inline DeadBlock* Region::allocate()
-{
- ASSERT(!isFull());
- m_blocksInUse++;
- return m_deadBlocks.removeHead();
-}
-
-inline void Region::deallocate(void* base)
-{
- ASSERT(base);
- ASSERT(m_blocksInUse);
- ASSERT(base >= m_allocation.base() && base < static_cast<char*>(m_allocation.base()) + m_allocation.size());
- DeadBlock* block = new (NotNull, base) DeadBlock(this);
- m_deadBlocks.push(block);
- m_blocksInUse--;
-}
-
class BlockAllocator {
public:
BlockAllocator();
@@ -178,6 +71,12 @@ private:
, m_blockSize(blockSize)
{
}
+
+ bool isEmpty() const
+ {
+ return m_fullRegions.isEmpty() && m_partialRegions.isEmpty();
+ }
+
DoublyLinkedList<Region> m_fullRegions;
DoublyLinkedList<Region> m_partialRegions;
size_t m_numberOfPartialRegions;
@@ -186,14 +85,16 @@ private:
DeadBlock* tryAllocateFromRegion(RegionSet&, DoublyLinkedList<Region>&, size_t&);
+ bool allRegionSetsAreEmpty() const;
void releaseFreeRegions();
template <typename T> RegionSet& regionSetFor();
+ SuperRegion m_superRegion;
RegionSet m_copiedRegionSet;
RegionSet m_markedRegionSet;
// WeakBlocks and MarkStackSegments use the same RegionSet since they're the same size.
- RegionSet m_weakAndMarkStackRegionSet;
+ RegionSet m_fourKBBlockRegionSet;
RegionSet m_workListRegionSet;
DoublyLinkedList<Region> m_emptyRegions;
@@ -248,7 +149,7 @@ inline DeadBlock* BlockAllocator::allocate()
return block;
}
- Region* newRegion = Region::create(T::blockSize);
+ Region* newRegion = Region::create(&m_superRegion, T::blockSize);
SpinLockHolder locker(&m_regionLock);
m_emptyRegions.push(newRegion);
@@ -261,7 +162,7 @@ inline DeadBlock* BlockAllocator::allocate()
inline DeadBlock* BlockAllocator::allocateCustomSize(size_t blockSize, size_t blockAlignment)
{
size_t realSize = WTF::roundUpToMultipleOf(blockAlignment, blockSize);
- Region* newRegion = Region::createCustomSize(realSize, blockAlignment);
+ Region* newRegion = Region::createCustomSize(&m_superRegion, realSize, blockAlignment);
DeadBlock* block = newRegion->allocate();
ASSERT(block);
return block;
@@ -307,7 +208,7 @@ inline void BlockAllocator::deallocateCustomSize(T* block)
Region* region = block->region();
ASSERT(region->isCustomSize());
region->deallocate(block);
- delete region;
+ region->destroy();
}
template <>
@@ -325,13 +226,13 @@ inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<MarkedBlock>()
template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<WeakBlock>()
{
- return m_weakAndMarkStackRegionSet;
+ return m_fourKBBlockRegionSet;
}
template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<MarkStackSegment>()
{
- return m_weakAndMarkStackRegionSet;
+ return m_fourKBBlockRegionSet;
}
template <>
@@ -341,6 +242,12 @@ inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<CopyWorkListSegment>()
}
template <>
+inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HandleBlock>()
+{
+ return m_fourKBBlockRegionSet;
+}
+
+template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<CopiedBlock> >()
{
return m_copiedRegionSet;
@@ -355,13 +262,13 @@ inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<MarkedB
template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<WeakBlock> >()
{
- return m_weakAndMarkStackRegionSet;
+ return m_fourKBBlockRegionSet;
}
template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<MarkStackSegment> >()
{
- return m_weakAndMarkStackRegionSet;
+ return m_fourKBBlockRegionSet;
}
template <>
@@ -370,10 +277,16 @@ inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<CopyWorkListSegment> >()
return m_workListRegionSet;
}
+template <>
+inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<HandleBlock> >()
+{
+ return m_fourKBBlockRegionSet;
+}
+
template <typename T>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor()
{
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
return *(RegionSet*)0;
}
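
The BlockAllocator comment earlier in this file ("Simple allocator to reduce VM cost by holding onto blocks of memory for short periods of time...") pairs with the rename to m_fourKBBlockRegionSet visible above: block types of the same size (WeakBlock, MarkStackSegment, and now HandleBlock, all 4 KB) are routed to one shared region set, and the routing happens at compile time through explicit specializations of regionSetFor<T>(). A minimal, self-contained sketch of that dispatch idiom in the same spirit, using hypothetical names (BlockRecycler, RegionPool) rather than the actual JavaScriptCore classes:

    #include <cassert>
    #include <cstddef>

    // Hypothetical stand-ins for 4 KB block types such as WeakBlock or MarkStackSegment.
    struct SmallBlockA { static const std::size_t blockSize = 4 * 1024; };
    struct SmallBlockB { static const std::size_t blockSize = 4 * 1024; };
    struct LargeBlock  { static const std::size_t blockSize = 32 * 1024; };

    struct RegionPool { std::size_t blockSize; };

    class BlockRecycler {
    public:
        BlockRecycler() : m_fourKBPool{4 * 1024}, m_largePool{32 * 1024} {}

        // Primary template: reaching this means a block type was never routed to a pool.
        template <typename T> RegionPool& poolFor()
        {
            assert(false && "unrouted block type");
            return m_fourKBPool;
        }

    private:
        RegionPool m_fourKBPool; // shared by every 4 KB block type
        RegionPool m_largePool;
    };

    // Same-size block types deliberately share one pool, like m_fourKBBlockRegionSet above.
    template <> inline RegionPool& BlockRecycler::poolFor<SmallBlockA>() { return m_fourKBPool; }
    template <> inline RegionPool& BlockRecycler::poolFor<SmallBlockB>() { return m_fourKBPool; }
    template <> inline RegionPool& BlockRecycler::poolFor<LargeBlock>()  { return m_largePool; }

    int main()
    {
        BlockRecycler recycler;
        assert(&recycler.poolFor<SmallBlockA>() == &recycler.poolFor<SmallBlockB>());
        assert(recycler.poolFor<LargeBlock>().blockSize == LargeBlock::blockSize);
        return 0;
    }
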
diff --git a/Source/JavaScriptCore/heap/CardSet.h b/Source/JavaScriptCore/heap/CardSet.h
deleted file mode 100644
index dc44c024d..000000000
--- a/Source/JavaScriptCore/heap/CardSet.h
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Copyright (C) 2011 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
- * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
- * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
- * THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef CardSet_h
-#define CardSet_h
-
-#include <stdint.h>
-#include <wtf/Assertions.h>
-#include <wtf/Noncopyable.h>
-
-namespace JSC {
-
-template <size_t cardSize, size_t blockSize> class CardSet {
- WTF_MAKE_NONCOPYABLE(CardSet);
-
-public:
- static const size_t cardCount = (blockSize + cardSize - 1) / cardSize;
-
- CardSet()
- {
- memset(m_cards, 0, cardCount);
- }
-
- bool isCardMarkedForAtom(const void*);
- void markCardForAtom(const void*);
- uint8_t& cardForAtom(const void*);
- bool isCardMarked(size_t);
- bool testAndClear(size_t);
-
-private:
- uint8_t m_cards[cardCount];
- COMPILE_ASSERT(!(cardSize & (cardSize - 1)), cardSet_cardSize_is_power_of_two);
- COMPILE_ASSERT(!(cardCount & (cardCount - 1)), cardSet_cardCount_is_power_of_two);
-};
-
-template <size_t cardSize, size_t blockSize> uint8_t& CardSet<cardSize, blockSize>::cardForAtom(const void* ptr)
-{
- ASSERT(ptr > this && ptr < (reinterpret_cast<char*>(this) + cardCount * cardSize));
- uintptr_t card = (reinterpret_cast<uintptr_t>(ptr) / cardSize) % cardCount;
- return m_cards[card];
-}
-
-template <size_t cardSize, size_t blockSize> bool CardSet<cardSize, blockSize>::isCardMarkedForAtom(const void* ptr)
-{
- return cardForAtom(ptr);
-}
-
-template <size_t cardSize, size_t blockSize> void CardSet<cardSize, blockSize>::markCardForAtom(const void* ptr)
-{
- cardForAtom(ptr) = 1;
-}
-
-template <size_t cardSize, size_t blockSize> bool CardSet<cardSize, blockSize>::isCardMarked(size_t i)
-{
- ASSERT(i < cardCount);
- return m_cards[i];
-}
-
-template <size_t cardSize, size_t blockSize> bool CardSet<cardSize, blockSize>::testAndClear(size_t i)
-{
- ASSERT(i < cardCount);
- bool result = m_cards[i];
- m_cards[i] = 0;
- return result;
-}
-
-}
-
-#endif
diff --git a/Source/JavaScriptCore/heap/CopiedBlock.h b/Source/JavaScriptCore/heap/CopiedBlock.h
index cc60a0103..86cca8cbe 100644
--- a/Source/JavaScriptCore/heap/CopiedBlock.h
+++ b/Source/JavaScriptCore/heap/CopiedBlock.h
@@ -29,8 +29,7 @@
#include "BlockAllocator.h"
#include "CopyWorkList.h"
#include "HeapBlock.h"
-#include "JSValue.h"
-#include "JSValueInlines.h"
+#include "JSCJSValue.h"
#include "Options.h"
#include <wtf/Atomics.h>
#include <wtf/OwnPtr.h>
@@ -117,7 +116,7 @@ inline void CopiedBlock::zeroFillWilderness()
#else
JSValue emptyValue;
JSValue* limit = reinterpret_cast_ptr<JSValue*>(wildernessEnd());
- for (JSValue* currentValue = reinterpret_cast<JSValue*>(wilderness()); currentValue < limit; currentValue++)
+ for (JSValue* currentValue = reinterpret_cast_ptr<JSValue*>(wilderness()); currentValue < limit; currentValue++)
*currentValue = emptyValue;
#endif
}
diff --git a/Source/JavaScriptCore/heap/CopiedSpace.cpp b/Source/JavaScriptCore/heap/CopiedSpace.cpp
index b235de1dd..b23e87397 100644
--- a/Source/JavaScriptCore/heap/CopiedSpace.cpp
+++ b/Source/JavaScriptCore/heap/CopiedSpace.cpp
@@ -28,6 +28,7 @@
#include "CopiedSpaceInlines.h"
#include "GCActivityCallback.h"
+#include "Operations.h"
#include "Options.h"
namespace JSC {
@@ -68,7 +69,7 @@ CheckedBoolean CopiedSpace::tryAllocateSlowCase(size_t bytes, void** outPtr)
if (isOversize(bytes))
return tryAllocateOversize(bytes, outPtr);
- ASSERT(m_heap->globalData()->apiLock().currentThreadIsHoldingLock());
+ ASSERT(m_heap->vm()->apiLock().currentThreadIsHoldingLock());
m_heap->didAllocate(m_allocator.currentCapacity());
allocateBlock();
@@ -102,7 +103,7 @@ CheckedBoolean CopiedSpace::tryReallocate(void** ptr, size_t oldSize, size_t new
return true;
void* oldPtr = *ptr;
- ASSERT(!m_heap->globalData()->isInitializingObject());
+ ASSERT(!m_heap->vm()->isInitializingObject());
if (CopiedSpace::blockFor(oldPtr)->isOversize() || isOversize(newSize))
return tryReallocateOversize(ptr, oldSize, newSize);
diff --git a/Source/JavaScriptCore/heap/CopiedSpaceInlines.h b/Source/JavaScriptCore/heap/CopiedSpaceInlines.h
index 6087cf4c2..47f2414f3 100644
--- a/Source/JavaScriptCore/heap/CopiedSpaceInlines.h
+++ b/Source/JavaScriptCore/heap/CopiedSpaceInlines.h
@@ -30,7 +30,7 @@
#include "CopiedSpace.h"
#include "Heap.h"
#include "HeapBlock.h"
-#include "JSGlobalData.h"
+#include "VM.h"
#include <wtf/CheckedBoolean.h>
namespace JSC {
@@ -150,7 +150,7 @@ inline void CopiedSpace::allocateBlock()
inline CheckedBoolean CopiedSpace::tryAllocate(size_t bytes, void** outPtr)
{
- ASSERT(!m_heap->globalData()->isInitializingObject());
+ ASSERT(!m_heap->vm()->isInitializingObject());
if (!m_allocator.tryAllocate(bytes, outPtr))
return tryAllocateSlowCase(bytes, outPtr);
diff --git a/Source/JavaScriptCore/heap/DFGCodeBlocks.cpp b/Source/JavaScriptCore/heap/DFGCodeBlocks.cpp
index f0d7c0c89..e3cc75919 100644
--- a/Source/JavaScriptCore/heap/DFGCodeBlocks.cpp
+++ b/Source/JavaScriptCore/heap/DFGCodeBlocks.cpp
@@ -38,14 +38,12 @@ DFGCodeBlocks::DFGCodeBlocks() { }
DFGCodeBlocks::~DFGCodeBlocks()
{
- Vector<CodeBlock*, 16> toRemove;
+ Vector<OwnPtr<CodeBlock>, 16> toRemove;
for (HashSet<CodeBlock*>::iterator iter = m_set.begin(); iter != m_set.end(); ++iter) {
if ((*iter)->m_dfgData->isJettisoned)
- toRemove.append(*iter);
+ toRemove.append(adoptPtr(*iter));
}
-
- WTF::deleteAllValues(toRemove);
}
void DFGCodeBlocks::jettison(PassOwnPtr<CodeBlock> codeBlockPtr)
@@ -75,14 +73,12 @@ void DFGCodeBlocks::clearMarks()
void DFGCodeBlocks::deleteUnmarkedJettisonedCodeBlocks()
{
- Vector<CodeBlock*, 16> toRemove;
+ Vector<OwnPtr<CodeBlock>, 16> toRemove;
for (HashSet<CodeBlock*>::iterator iter = m_set.begin(); iter != m_set.end(); ++iter) {
if ((*iter)->m_dfgData->isJettisoned && !(*iter)->m_dfgData->mayBeExecuting)
- toRemove.append(*iter);
+ toRemove.append(adoptPtr(*iter));
}
-
- WTF::deleteAllValues(toRemove);
}
void DFGCodeBlocks::traceMarkedCodeBlocks(SlotVisitor& visitor)
diff --git a/Source/JavaScriptCore/heap/GCAssertions.h b/Source/JavaScriptCore/heap/GCAssertions.h
index 9feefe1d1..7c7054deb 100644
--- a/Source/JavaScriptCore/heap/GCAssertions.h
+++ b/Source/JavaScriptCore/heap/GCAssertions.h
@@ -30,16 +30,13 @@
#if ENABLE(GC_VALIDATION)
#define ASSERT_GC_OBJECT_LOOKS_VALID(cell) do { \
- if (!(cell))\
- CRASH();\
- if (cell->unvalidatedStructure()->unvalidatedStructure() != cell->unvalidatedStructure()->unvalidatedStructure()->unvalidatedStructure())\
- CRASH();\
+ RELEASE_ASSERT(cell);\
+ RELEASE_ASSERT(cell->unvalidatedStructure()->unvalidatedStructure() == cell->unvalidatedStructure()->unvalidatedStructure()->unvalidatedStructure()); \
} while (0)
#define ASSERT_GC_OBJECT_INHERITS(object, classInfo) do {\
ASSERT_GC_OBJECT_LOOKS_VALID(object); \
- if (!object->inherits(classInfo)) \
- CRASH();\
+ RELEASE_ASSERT(object->inherits(classInfo)); \
} while (0)
#else
diff --git a/Source/JavaScriptCore/heap/GCThread.cpp b/Source/JavaScriptCore/heap/GCThread.cpp
index 7caa7d588..aa868f1b3 100644
--- a/Source/JavaScriptCore/heap/GCThread.cpp
+++ b/Source/JavaScriptCore/heap/GCThread.cpp
@@ -119,10 +119,10 @@ void GCThread::gcThreadMain()
m_copyVisitor->doneCopying();
break;
case NoPhase:
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
break;
case Exit:
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
break;
}
}
diff --git a/Source/JavaScriptCore/heap/GCThreadSharedData.cpp b/Source/JavaScriptCore/heap/GCThreadSharedData.cpp
index 5d2e908f1..b39ab5763 100644
--- a/Source/JavaScriptCore/heap/GCThreadSharedData.cpp
+++ b/Source/JavaScriptCore/heap/GCThreadSharedData.cpp
@@ -29,7 +29,7 @@
#include "CopyVisitor.h"
#include "CopyVisitorInlines.h"
#include "GCThread.h"
-#include "JSGlobalData.h"
+#include "VM.h"
#include "MarkStack.h"
#include "SlotVisitor.h"
#include "SlotVisitorInlines.h"
@@ -52,11 +52,11 @@ size_t GCThreadSharedData::childVisitCount()
}
#endif
-GCThreadSharedData::GCThreadSharedData(JSGlobalData* globalData)
- : m_globalData(globalData)
- , m_copiedSpace(&globalData->heap.m_storageSpace)
- , m_shouldHashConst(false)
- , m_sharedMarkStack(globalData->heap.blockAllocator())
+GCThreadSharedData::GCThreadSharedData(VM* vm)
+ : m_vm(vm)
+ , m_copiedSpace(&vm->heap.m_storageSpace)
+ , m_shouldHashCons(false)
+ , m_sharedMarkStack(vm->heap.blockAllocator())
, m_numberOfActiveParallelMarkers(0)
, m_parallelMarkersShouldExit(false)
, m_copyIndex(0)
@@ -115,9 +115,9 @@ void GCThreadSharedData::reset()
#endif
m_weakReferenceHarvesters.removeAll();
- if (m_shouldHashConst) {
- m_globalData->resetNewStringsSinceLastHashConst();
- m_shouldHashConst = false;
+ if (m_shouldHashCons) {
+ m_vm->resetNewStringsSinceLastHashCons();
+ m_shouldHashCons = false;
}
}
diff --git a/Source/JavaScriptCore/heap/GCThreadSharedData.h b/Source/JavaScriptCore/heap/GCThreadSharedData.h
index dbc11b552..47a53ebd8 100644
--- a/Source/JavaScriptCore/heap/GCThreadSharedData.h
+++ b/Source/JavaScriptCore/heap/GCThreadSharedData.h
@@ -39,7 +39,7 @@
namespace JSC {
class GCThread;
-class JSGlobalData;
+class VM;
class CopiedSpace;
class CopyVisitor;
@@ -52,7 +52,7 @@ enum GCPhase {
class GCThreadSharedData {
public:
- GCThreadSharedData(JSGlobalData*);
+ GCThreadSharedData(VM*);
~GCThreadSharedData();
void reset();
@@ -77,10 +77,10 @@ private:
void startNextPhase(GCPhase);
void endCurrentPhase();
- JSGlobalData* m_globalData;
+ VM* m_vm;
CopiedSpace* m_copiedSpace;
- bool m_shouldHashConst;
+ bool m_shouldHashCons;
Vector<GCThread*> m_gcThreads;
diff --git a/Source/JavaScriptCore/heap/Handle.h b/Source/JavaScriptCore/heap/Handle.h
index 3b62e2054..28ac30cd9 100644
--- a/Source/JavaScriptCore/heap/Handle.h
+++ b/Source/JavaScriptCore/heap/Handle.h
@@ -43,14 +43,11 @@ template <class T> class Handle;
// Creating a JSValue Handle is invalid
template <> class Handle<JSValue>;
-// Forward declare WeakGCMap
-template<typename KeyType, typename MappedType, typename FinalizerCallback, typename HashArg, typename KeyTraitsArg> class WeakGCMap;
-
class HandleBase {
template <typename T> friend class Weak;
+ template <typename T> friend class Strong;
friend class HandleSet;
friend struct JSCallbackObjectData;
- template <typename KeyType, typename MappedType, typename FinalizerCallback, typename HashArg, typename KeyTraitsArg> friend class WeakGCMap;
public:
bool operator!() const { return !m_slot || !*m_slot; }
diff --git a/Source/JavaScriptCore/heap/HandleBlock.h b/Source/JavaScriptCore/heap/HandleBlock.h
new file mode 100644
index 000000000..962d37c5e
--- /dev/null
+++ b/Source/JavaScriptCore/heap/HandleBlock.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef HandleBlock_h
+#define HandleBlock_h
+
+#include "HeapBlock.h"
+
+namespace JSC {
+
+class DeadBlock;
+class HandleSet;
+class HandleNode;
+
+class HandleBlock : public HeapBlock<HandleBlock> {
+public:
+ static HandleBlock* create(DeadBlock*, HandleSet*);
+ static HandleBlock* blockFor(HandleNode*);
+
+ static const size_t blockSize = 4 * KB;
+
+ HandleSet* handleSet();
+
+ HandleNode* nodes();
+ HandleNode* nodeAtIndex(unsigned);
+ unsigned nodeCapacity();
+
+private:
+ HandleBlock(Region*, HandleSet*);
+
+ char* payload();
+ char* payloadEnd();
+
+ static const size_t s_blockMask = ~(blockSize - 1);
+
+ HandleSet* m_handleSet;
+};
+
+inline HandleBlock* HandleBlock::blockFor(HandleNode* node)
+{
+ return reinterpret_cast<HandleBlock*>(reinterpret_cast<size_t>(node) & s_blockMask);
+}
+
+inline HandleSet* HandleBlock::handleSet()
+{
+ return m_handleSet;
+}
+
+} // namespace JSC
+
+#endif // HandleBlock_h
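
The key move in this new header is that a handle node no longer needs a back-pointer to its HandleSet: HandleBlock::blockFor() masks a node's address down to the 4 KB block boundary and reads the owner out of the block header (compare the HandleSet.h hunk below, where HandleNode drops its m_handleSet member). A minimal sketch of that masking idiom, assuming only that blocks are allocated with 4 KB alignment; Owner, Node and Block are illustrative names, not the JSC classes:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>
    #include <new>

    static const std::size_t kBlockSize = 4 * 1024;
    static const std::uintptr_t kBlockMask = ~(static_cast<std::uintptr_t>(kBlockSize) - 1);

    struct Owner {};                 // stands in for HandleSet
    struct Node { void* payload; };  // note: no back-pointer to Owner

    struct Block {
        explicit Block(Owner* owner) : m_owner(owner) {}

        // Recover the enclosing block from any node inside it by masking the address.
        static Block* blockFor(Node* node)
        {
            return reinterpret_cast<Block*>(reinterpret_cast<std::uintptr_t>(node) & kBlockMask);
        }

        Owner* owner() const { return m_owner; }

        Owner* m_owner;
        Node m_nodes[8];             // in the real block the rest of the 4 KB holds nodes
    };

    int main()
    {
        // The mask only works because the block starts on a 4 KB boundary (C++17 aligned_alloc).
        void* memory = std::aligned_alloc(kBlockSize, kBlockSize);
        assert(memory);

        Owner owner;
        Block* block = new (memory) Block(&owner);

        Node* node = &block->m_nodes[5];
        assert(Block::blockFor(node) == block);            // mask recovers the block header
        assert(Block::blockFor(node)->owner() == &owner);  // ...and through it the owner

        block->~Block();
        std::free(memory);
        return 0;
    }
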
diff --git a/Source/JavaScriptCore/heap/HandleBlockInlines.h b/Source/JavaScriptCore/heap/HandleBlockInlines.h
new file mode 100644
index 000000000..7c771935e
--- /dev/null
+++ b/Source/JavaScriptCore/heap/HandleBlockInlines.h
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef HandleBlockInlines_h
+#define HandleBlockInlines_h
+
+#include "BlockAllocator.h"
+#include "HandleBlock.h"
+
+namespace JSC {
+
+inline HandleBlock* HandleBlock::create(DeadBlock* block, HandleSet* handleSet)
+{
+ Region* region = block->region();
+ return new (NotNull, block) HandleBlock(region, handleSet);
+}
+
+inline HandleBlock::HandleBlock(Region* region, HandleSet* handleSet)
+ : HeapBlock<HandleBlock>(region)
+ , m_handleSet(handleSet)
+{
+}
+
+inline char* HandleBlock::payloadEnd()
+{
+ return reinterpret_cast<char*>(this) + region()->blockSize();
+}
+
+inline char* HandleBlock::payload()
+{
+ return reinterpret_cast<char*>(this) + WTF::roundUpToMultipleOf<sizeof(double)>(sizeof(HandleBlock));
+}
+
+inline HandleNode* HandleBlock::nodes()
+{
+ return reinterpret_cast_ptr<HandleNode*>(payload());
+}
+
+inline HandleNode* HandleBlock::nodeAtIndex(unsigned i)
+{
+ ASSERT(i < nodeCapacity());
+ return &nodes()[i];
+}
+
+inline unsigned HandleBlock::nodeCapacity()
+{
+ return (payloadEnd() - payload()) / sizeof(HandleNode);
+}
+
+} // namespace JSC
+
+#endif // HandleBlockInlines_h
diff --git a/Source/JavaScriptCore/heap/HandleSet.cpp b/Source/JavaScriptCore/heap/HandleSet.cpp
index a6ccf29eb..fdb554448 100644
--- a/Source/JavaScriptCore/heap/HandleSet.cpp
+++ b/Source/JavaScriptCore/heap/HandleSet.cpp
@@ -26,24 +26,36 @@
#include "config.h"
#include "HandleSet.h"
+#include "HandleBlock.h"
+#include "HandleBlockInlines.h"
#include "HeapRootVisitor.h"
#include "JSObject.h"
+#include "Operations.h"
+#include <wtf/DataLog.h>
namespace JSC {
-HandleSet::HandleSet(JSGlobalData* globalData)
- : m_globalData(globalData)
+HandleSet::HandleSet(VM* vm)
+ : m_vm(vm)
, m_nextToFinalize(0)
{
grow();
}
+HandleSet::~HandleSet()
+{
+ while (!m_blockList.isEmpty())
+ m_vm->heap.blockAllocator().deallocate(HandleBlock::destroy(m_blockList.removeHead()));
+}
+
void HandleSet::grow()
{
- Node* block = m_blockStack.grow();
- for (int i = m_blockStack.blockLength - 1; i >= 0; --i) {
- Node* node = &block[i];
- new (NotNull, node) Node(this);
+ HandleBlock* newBlock = HandleBlock::create(m_vm->heap.blockAllocator().allocate<HandleBlock>(), this);
+ m_blockList.append(newBlock);
+
+ for (int i = newBlock->nodeCapacity() - 1; i >= 0; --i) {
+ Node* node = newBlock->nodeAtIndex(i);
+ new (NotNull, node) Node;
m_freeList.push(node);
}
}
@@ -53,8 +65,7 @@ void HandleSet::visitStrongHandles(HeapRootVisitor& heapRootVisitor)
Node* end = m_strongList.end();
for (Node* node = m_strongList.begin(); node != end; node = node->next()) {
#if ENABLE(GC_VALIDATION)
- if (!isLiveNode(node))
- CRASH();
+ RELEASE_ASSERT(isLiveNode(node));
#endif
heapRootVisitor.visit(node->slot());
}
@@ -64,16 +75,14 @@ void HandleSet::writeBarrier(HandleSlot slot, const JSValue& value)
{
// Forbid assignment to handles during the finalization phase, since it would violate many GC invariants.
// File a bug with stack trace if you hit this.
- if (m_nextToFinalize)
- CRASH();
+ RELEASE_ASSERT(!m_nextToFinalize);
if (!value == !*slot && slot->isCell() == value.isCell())
return;
Node* node = toNode(slot);
#if ENABLE(GC_VALIDATION)
- if (!isLiveNode(node))
- CRASH();
+ RELEASE_ASSERT(isLiveNode(node));
#endif
SentinelLinkedList<Node>::remove(node);
if (!value || !value.isCell()) {
@@ -83,8 +92,7 @@ void HandleSet::writeBarrier(HandleSlot slot, const JSValue& value)
m_strongList.push(node);
#if ENABLE(GC_VALIDATION)
- if (!isLiveNode(node))
- CRASH();
+ RELEASE_ASSERT(isLiveNode(node));
#endif
}
diff --git a/Source/JavaScriptCore/heap/HandleSet.h b/Source/JavaScriptCore/heap/HandleSet.h
index c22ffa418..58251f66a 100644
--- a/Source/JavaScriptCore/heap/HandleSet.h
+++ b/Source/JavaScriptCore/heap/HandleSet.h
@@ -26,27 +26,51 @@
#ifndef HandleSet_h
#define HandleSet_h
-#include <wtf/BlockStack.h>
#include "Handle.h"
+#include "HandleBlock.h"
+#include <wtf/DoublyLinkedList.h>
#include <wtf/HashCountedSet.h>
#include <wtf/SentinelLinkedList.h>
#include <wtf/SinglyLinkedList.h>
namespace JSC {
+class HandleBlock;
class HandleSet;
class HeapRootVisitor;
-class JSGlobalData;
+class VM;
class JSValue;
class SlotVisitor;
+class HandleNode {
+public:
+ HandleNode(WTF::SentinelTag);
+ HandleNode();
+
+ HandleSlot slot();
+ HandleSet* handleSet();
+
+ void setPrev(HandleNode*);
+ HandleNode* prev();
+
+ void setNext(HandleNode*);
+ HandleNode* next();
+
+private:
+ JSValue m_value;
+ HandleNode* m_prev;
+ HandleNode* m_next;
+};
+
class HandleSet {
+ friend class HandleBlock;
public:
static HandleSet* heapFor(HandleSlot);
- HandleSet(JSGlobalData*);
-
- JSGlobalData* globalData();
+ HandleSet(VM*);
+ ~HandleSet();
+
+ VM* vm();
HandleSlot allocate();
void deallocate(HandleSlot);
@@ -60,27 +84,7 @@ public:
template<typename Functor> void forEachStrongHandle(Functor&, const HashCountedSet<JSCell*>& skipSet);
private:
- class Node {
- public:
- Node(WTF::SentinelTag);
- Node(HandleSet*);
-
- HandleSlot slot();
- HandleSet* handleSet();
-
- void setPrev(Node*);
- Node* prev();
-
- void setNext(Node*);
- Node* next();
-
- private:
- JSValue m_value;
- HandleSet* m_handleSet;
- Node* m_prev;
- Node* m_next;
- };
-
+ typedef HandleNode Node;
static HandleSlot toHandle(Node*);
static Node* toNode(HandleSlot);
@@ -90,8 +94,8 @@ private:
bool isLiveNode(Node*);
#endif
- JSGlobalData* m_globalData;
- BlockStack<Node> m_blockStack;
+ VM* m_vm;
+ DoublyLinkedList<HandleBlock> m_blockList;
SentinelLinkedList<Node> m_strongList;
SentinelLinkedList<Node> m_immediateList;
@@ -104,96 +108,94 @@ inline HandleSet* HandleSet::heapFor(HandleSlot handle)
return toNode(handle)->handleSet();
}
-inline JSGlobalData* HandleSet::globalData()
+inline VM* HandleSet::vm()
{
- return m_globalData;
+ return m_vm;
}
-inline HandleSlot HandleSet::toHandle(Node* node)
+inline HandleSlot HandleSet::toHandle(HandleSet::Node* node)
{
return reinterpret_cast<HandleSlot>(node);
}
inline HandleSet::Node* HandleSet::toNode(HandleSlot handle)
{
- return reinterpret_cast<Node*>(handle);
+ return reinterpret_cast<HandleSet::Node*>(handle);
}
inline HandleSlot HandleSet::allocate()
{
// Forbid assignment to handles during the finalization phase, since it would violate many GC invariants.
// File a bug with stack trace if you hit this.
- if (m_nextToFinalize)
- CRASH();
+ RELEASE_ASSERT(!m_nextToFinalize);
+
if (m_freeList.isEmpty())
grow();
- Node* node = m_freeList.pop();
- new (NotNull, node) Node(this);
+ HandleSet::Node* node = m_freeList.pop();
+ new (NotNull, node) HandleSet::Node();
m_immediateList.push(node);
return toHandle(node);
}
inline void HandleSet::deallocate(HandleSlot handle)
{
- Node* node = toNode(handle);
+ HandleSet::Node* node = toNode(handle);
if (node == m_nextToFinalize) {
ASSERT(m_nextToFinalize->next());
m_nextToFinalize = m_nextToFinalize->next();
}
- SentinelLinkedList<Node>::remove(node);
+ SentinelLinkedList<HandleSet::Node>::remove(node);
m_freeList.push(node);
}
-inline HandleSet::Node::Node(HandleSet* handleSet)
- : m_handleSet(handleSet)
- , m_prev(0)
+inline HandleNode::HandleNode()
+ : m_prev(0)
, m_next(0)
{
}
-inline HandleSet::Node::Node(WTF::SentinelTag)
- : m_handleSet(0)
- , m_prev(0)
+inline HandleNode::HandleNode(WTF::SentinelTag)
+ : m_prev(0)
, m_next(0)
{
}
-inline HandleSlot HandleSet::Node::slot()
+inline HandleSlot HandleNode::slot()
{
return &m_value;
}
-inline HandleSet* HandleSet::Node::handleSet()
+inline HandleSet* HandleNode::handleSet()
{
- return m_handleSet;
+ return HandleBlock::blockFor(this)->handleSet();
}
-inline void HandleSet::Node::setPrev(Node* prev)
+inline void HandleNode::setPrev(HandleNode* prev)
{
m_prev = prev;
}
-inline HandleSet::Node* HandleSet::Node::prev()
+inline HandleNode* HandleNode::prev()
{
return m_prev;
}
-inline void HandleSet::Node::setNext(Node* next)
+inline void HandleNode::setNext(HandleNode* next)
{
m_next = next;
}
-inline HandleSet::Node* HandleSet::Node::next()
+inline HandleNode* HandleNode::next()
{
return m_next;
}
template<typename Functor> void HandleSet::forEachStrongHandle(Functor& functor, const HashCountedSet<JSCell*>& skipSet)
{
- Node* end = m_strongList.end();
- for (Node* node = m_strongList.begin(); node != end; node = node->next()) {
+ HandleSet::Node* end = m_strongList.end();
+ for (HandleSet::Node* node = m_strongList.begin(); node != end; node = node->next()) {
JSValue value = *node->slot();
if (!value || !value.isCell())
continue;
diff --git a/Source/JavaScriptCore/heap/HandleStack.cpp b/Source/JavaScriptCore/heap/HandleStack.cpp
index a5653c748..41b2ada5f 100644
--- a/Source/JavaScriptCore/heap/HandleStack.cpp
+++ b/Source/JavaScriptCore/heap/HandleStack.cpp
@@ -28,7 +28,7 @@
#include "HeapRootVisitor.h"
#include "JSObject.h"
-#include "JSValueInlines.h"
+#include "Operations.h"
namespace JSC {
diff --git a/Source/JavaScriptCore/heap/HandleStack.h b/Source/JavaScriptCore/heap/HandleStack.h
index 858ebf23f..a7ce97650 100644
--- a/Source/JavaScriptCore/heap/HandleStack.h
+++ b/Source/JavaScriptCore/heap/HandleStack.h
@@ -30,8 +30,6 @@
#include <wtf/BlockStack.h>
#include "Handle.h"
-#include <wtf/UnusedParam.h>
-
namespace JSC {
class LocalScope;
diff --git a/Source/JavaScriptCore/heap/HandleTypes.h b/Source/JavaScriptCore/heap/HandleTypes.h
index bdfbcfda3..42a267e57 100644
--- a/Source/JavaScriptCore/heap/HandleTypes.h
+++ b/Source/JavaScriptCore/heap/HandleTypes.h
@@ -26,7 +26,7 @@
#ifndef HandleTypes_h
#define HandleTypes_h
-#include "JSValue.h"
+#include "JSCJSValue.h"
namespace JSC {
diff --git a/Source/JavaScriptCore/heap/Heap.cpp b/Source/JavaScriptCore/heap/Heap.cpp
index 9ff318b08..35a9bf71f 100644
--- a/Source/JavaScriptCore/heap/Heap.cpp
+++ b/Source/JavaScriptCore/heap/Heap.cpp
@@ -31,10 +31,11 @@
#include "HeapStatistics.h"
#include "IncrementalSweeper.h"
#include "Interpreter.h"
-#include "JSGlobalData.h"
+#include "VM.h"
#include "JSGlobalObject.h"
#include "JSLock.h"
#include "JSONObject.h"
+#include "Operations.h"
#include "Tracing.h"
#include "UnlinkedCodeBlock.h"
#include "WeakSetInlines.h"
@@ -163,17 +164,17 @@ static inline size_t proportionalHeapSize(size_t heapSize, size_t ramSize)
return 1.25 * heapSize;
}
-static inline bool isValidSharedInstanceThreadState(JSGlobalData* globalData)
+static inline bool isValidSharedInstanceThreadState(VM* vm)
{
- return globalData->apiLock().currentThreadIsHoldingLock();
+ return vm->apiLock().currentThreadIsHoldingLock();
}
-static inline bool isValidThreadState(JSGlobalData* globalData)
+static inline bool isValidThreadState(VM* vm)
{
- if (globalData->identifierTable != wtfThreadData().currentIdentifierTable())
+ if (vm->identifierTable != wtfThreadData().currentIdentifierTable())
return false;
- if (globalData->isSharedInstance() && !isValidSharedInstanceThreadState(globalData))
+ if (vm->isSharedInstance() && !isValidSharedInstanceThreadState(vm))
return false;
return true;
@@ -240,7 +241,7 @@ inline PassOwnPtr<TypeCountSet> RecordType::returnValue()
} // anonymous namespace
-Heap::Heap(JSGlobalData* globalData, HeapType heapType)
+Heap::Heap(VM* vm, HeapType heapType)
: m_heapType(heapType)
, m_ramSize(ramSize())
, m_minBytesPerCycle(minHeapSize(m_heapType, m_ramSize))
@@ -249,15 +250,16 @@ Heap::Heap(JSGlobalData* globalData, HeapType heapType)
, m_bytesAllocated(0)
, m_bytesAbandoned(0)
, m_operationInProgress(NoOperation)
+ , m_blockAllocator()
, m_objectSpace(this)
, m_storageSpace(this)
, m_machineThreads(this)
- , m_sharedData(globalData)
+ , m_sharedData(vm)
, m_slotVisitor(m_sharedData)
, m_copyVisitor(m_sharedData)
- , m_handleSet(globalData)
+ , m_handleSet(vm)
, m_isSafeToCollect(false)
- , m_globalData(globalData)
+ , m_vm(vm)
, m_lastGCLength(0)
, m_lastCodeDiscardTime(WTF::currentTime())
, m_activityCallback(DefaultGCActivityCallback::create(this))
@@ -275,12 +277,12 @@ bool Heap::isPagedOut(double deadline)
return m_objectSpace.isPagedOut(deadline) || m_storageSpace.isPagedOut(deadline);
}
-// The JSGlobalData is being destroyed and the collector will never run again.
+// The VM is being destroyed and the collector will never run again.
// Run all pending finalizers now because we won't get another chance.
void Heap::lastChanceToFinalize()
{
- ASSERT(!m_globalData->dynamicGlobalObject);
- ASSERT(m_operationInProgress == NoOperation);
+ RELEASE_ASSERT(!m_vm->dynamicGlobalObject);
+ RELEASE_ASSERT(m_operationInProgress == NoOperation);
m_objectSpace.lastChanceToFinalize();
@@ -330,7 +332,7 @@ void Heap::didAbandon(size_t bytes)
void Heap::protect(JSValue k)
{
ASSERT(k);
- ASSERT(m_globalData->apiLock().currentThreadIsHoldingLock());
+ ASSERT(m_vm->apiLock().currentThreadIsHoldingLock());
if (!k.isCell())
return;
@@ -341,7 +343,7 @@ void Heap::protect(JSValue k)
bool Heap::unprotect(JSValue k)
{
ASSERT(k);
- ASSERT(m_globalData->apiLock().currentThreadIsHoldingLock());
+ ASSERT(m_vm->apiLock().currentThreadIsHoldingLock());
if (!k.isCell())
return false;
@@ -361,12 +363,12 @@ void Heap::markProtectedObjects(HeapRootVisitor& heapRootVisitor)
heapRootVisitor.visit(&it->key);
}
-void Heap::pushTempSortVector(Vector<ValueStringPair>* tempVector)
+void Heap::pushTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>* tempVector)
{
m_tempSortingVectors.append(tempVector);
}
-void Heap::popTempSortVector(Vector<ValueStringPair>* tempVector)
+void Heap::popTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>* tempVector)
{
ASSERT_UNUSED(tempVector, tempVector == m_tempSortingVectors.last());
m_tempSortingVectors.removeLast();
@@ -374,11 +376,11 @@ void Heap::popTempSortVector(Vector<ValueStringPair>* tempVector)
void Heap::markTempSortVectors(HeapRootVisitor& heapRootVisitor)
{
- typedef Vector<Vector<ValueStringPair>* > VectorOfValueStringVectors;
+ typedef Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>* > VectorOfValueStringVectors;
VectorOfValueStringVectors::iterator end = m_tempSortingVectors.end();
for (VectorOfValueStringVectors::iterator it = m_tempSortingVectors.begin(); it != end; ++it) {
- Vector<ValueStringPair>* tempSortingVector = *it;
+ Vector<ValueStringPair, 0, UnsafeVectorOverflow>* tempSortingVector = *it;
Vector<ValueStringPair>::iterator vectorEnd = tempSortingVector->end();
for (Vector<ValueStringPair>::iterator vectorIt = tempSortingVector->begin(); vectorIt != vectorEnd; ++vectorIt) {
@@ -400,12 +402,17 @@ void Heap::finalizeUnconditionalFinalizers()
inline JSStack& Heap::stack()
{
- return m_globalData->interpreter->stack();
+ return m_vm->interpreter->stack();
+}
+
+void Heap::canonicalizeCellLivenessData()
+{
+ m_objectSpace.canonicalizeCellLivenessData();
}
void Heap::getConservativeRegisterRoots(HashSet<JSCell*>& roots)
{
- ASSERT(isValidThreadState(m_globalData));
+ ASSERT(isValidThreadState(m_vm));
ConservativeRoots stackRoots(&m_objectSpace.blocks(), &m_storageSpace);
stack().gatherConservativeRoots(stackRoots);
size_t stackRootCount = stackRoots.size();
@@ -416,13 +423,12 @@ void Heap::getConservativeRegisterRoots(HashSet<JSCell*>& roots)
}
}
-void Heap::markRoots(bool fullGC)
+void Heap::markRoots()
{
SamplingRegion samplingRegion("Garbage Collection: Tracing");
- COND_GCPHASE(fullGC, MarkFullRoots, MarkYoungRoots);
- UNUSED_PARAM(fullGC);
- ASSERT(isValidThreadState(m_globalData));
+ GCPHASE(MarkRoots);
+ ASSERT(isValidThreadState(m_vm));
#if ENABLE(OBJECT_MARK_LOGGING)
double gcStartTime = WTF::currentTime();
@@ -451,17 +457,10 @@ void Heap::markRoots(bool fullGC)
ConservativeRoots scratchBufferRoots(&m_objectSpace.blocks(), &m_storageSpace);
{
GCPHASE(GatherScratchBufferRoots);
- m_globalData->gatherConservativeRoots(scratchBufferRoots);
+ m_vm->gatherConservativeRoots(scratchBufferRoots);
}
#endif
-#if ENABLE(GGC)
- MarkedBlock::DirtyCellVector dirtyCells;
- if (!fullGC) {
- GCPHASE(GatheringDirtyCells);
- m_objectSpace.gatherDirtyCells(dirtyCells);
- } else
-#endif
{
GCPHASE(clearMarks);
m_objectSpace.clearMarks();
@@ -474,24 +473,15 @@ void Heap::markRoots(bool fullGC)
{
ParallelModeEnabler enabler(visitor);
-#if ENABLE(GGC)
- {
- size_t dirtyCellCount = dirtyCells.size();
- GCPHASE(VisitDirtyCells);
- GCCOUNTER(DirtyCellCount, dirtyCellCount);
- for (size_t i = 0; i < dirtyCellCount; i++) {
- heapRootVisitor.visitChildren(dirtyCells[i]);
- visitor.donateAndDrain();
- }
- }
-#endif
- if (m_globalData->codeBlocksBeingCompiled.size()) {
+ if (m_vm->codeBlocksBeingCompiled.size()) {
GCPHASE(VisitActiveCodeBlock);
- for (size_t i = 0; i < m_globalData->codeBlocksBeingCompiled.size(); i++)
- m_globalData->codeBlocksBeingCompiled[i]->visitAggregate(visitor);
+ for (size_t i = 0; i < m_vm->codeBlocksBeingCompiled.size(); i++)
+ m_vm->codeBlocksBeingCompiled[i]->visitAggregate(visitor);
}
+ m_vm->smallStrings.visitStrongReferences(visitor);
+
{
GCPHASE(VisitMachineRoots);
MARK_LOG_ROOT(visitor, "C++ Stack");
@@ -533,10 +523,10 @@ void Heap::markRoots(bool fullGC)
visitor.donateAndDrain();
}
}
- if (m_globalData->exception) {
+ if (m_vm->exception) {
GCPHASE(MarkingException);
MARK_LOG_ROOT(visitor, "Exceptions");
- heapRootVisitor.visit(&m_globalData->exception);
+ heapRootVisitor.visit(&m_vm->exception);
visitor.donateAndDrain();
}
@@ -668,7 +658,7 @@ void Heap::deleteAllCompiledCode()
{
// If JavaScript is running, it's not safe to delete code, since we'll end
// up deleting code that is live on the stack.
- if (m_globalData->dynamicGlobalObject)
+ if (m_vm->dynamicGlobalObject)
return;
for (ExecutableBase* current = m_compiledCode.head(); current; current = current->next()) {
@@ -714,12 +704,11 @@ void Heap::collect(SweepToggle sweepToggle)
SamplingRegion samplingRegion("Garbage Collection");
GCPHASE(Collect);
- ASSERT(globalData()->apiLock().currentThreadIsHoldingLock());
- ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
+ ASSERT(vm()->apiLock().currentThreadIsHoldingLock());
+ RELEASE_ASSERT(vm()->identifierTable == wtfThreadData().currentIdentifierTable());
ASSERT(m_isSafeToCollect);
JAVASCRIPTCORE_GC_BEGIN();
- if (m_operationInProgress != NoOperation)
- CRASH();
+ RELEASE_ASSERT(m_operationInProgress == NoOperation);
m_operationInProgress = Collection;
m_activityCallback->willCollect();
@@ -730,19 +719,12 @@ void Heap::collect(SweepToggle sweepToggle)
m_lastCodeDiscardTime = WTF::currentTime();
}
-#if ENABLE(GGC)
- bool fullGC = sweepToggle == DoSweep;
- if (!fullGC)
- fullGC = (capacity() > 4 * m_sizeAfterLastCollect);
-#else
- bool fullGC = true;
-#endif
{
GCPHASE(Canonicalize);
m_objectSpace.canonicalizeCellLivenessData();
}
- markRoots(fullGC);
+ markRoots();
{
GCPHASE(ReapingWeakHandles);
@@ -766,7 +748,7 @@ void Heap::collect(SweepToggle sweepToggle)
{
GCPHASE(finalizeSmallStrings);
- m_globalData->smallStrings.finalizeSmallStrings();
+ m_vm->smallStrings.finalizeSmallStrings();
}
{
@@ -774,6 +756,11 @@ void Heap::collect(SweepToggle sweepToggle)
deleteUnmarkedCompiledCode();
}
+ {
+ GCPHASE(DeleteSourceProviderCaches);
+ m_vm->clearSourceProviderCaches();
+ }
+
if (sweepToggle == DoSweep) {
SamplingRegion samplingRegion("Garbage Collection: Sweeping");
GCPHASE(Sweeping);
@@ -793,23 +780,22 @@ void Heap::collect(SweepToggle sweepToggle)
if (Options::gcMaxHeapSize() && currentHeapSize > Options::gcMaxHeapSize())
HeapStatistics::exitWithFailure();
- if (fullGC) {
- m_sizeAfterLastCollect = currentHeapSize;
+ m_sizeAfterLastCollect = currentHeapSize;
+
+ // To avoid pathological GC churn in very small and very large heaps, we set
+ // the new allocation limit based on the current size of the heap, with a
+ // fixed minimum.
+ size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
+ m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
- // To avoid pathological GC churn in very small and very large heaps, we set
- // the new allocation limit based on the current size of the heap, with a
- // fixed minimum.
- size_t maxHeapSize = max(minHeapSize(m_heapType, m_ramSize), proportionalHeapSize(currentHeapSize, m_ramSize));
- m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
- }
m_bytesAllocated = 0;
double lastGCEndTime = WTF::currentTime();
m_lastGCLength = lastGCEndTime - lastGCStartTime;
if (Options::recordGCPauseTimes())
HeapStatistics::recordGCPauseTime(lastGCStartTime, lastGCEndTime);
- if (m_operationInProgress != Collection)
- CRASH();
+ RELEASE_ASSERT(m_operationInProgress == Collection);
+
m_operationInProgress = NoOperation;
JAVASCRIPTCORE_GC_END();
@@ -828,19 +814,19 @@ void Heap::markDeadObjects()
m_objectSpace.forEachDeadCell<MarkObject>();
}
-void Heap::setActivityCallback(GCActivityCallback* activityCallback)
+void Heap::setActivityCallback(PassOwnPtr<GCActivityCallback> activityCallback)
{
m_activityCallback = activityCallback;
}
GCActivityCallback* Heap::activityCallback()
{
- return m_activityCallback;
+ return m_activityCallback.get();
}
IncrementalSweeper* Heap::sweeper()
{
- return m_sweeper;
+ return m_sweeper.get();
}
void Heap::setGarbageCollectionTimerEnabled(bool enable)
@@ -856,7 +842,7 @@ void Heap::didAllocate(size_t bytes)
bool Heap::isValidAllocation(size_t)
{
- if (!isValidThreadState(m_globalData))
+ if (!isValidThreadState(m_vm))
return false;
if (m_operationInProgress != NoOperation)
@@ -883,15 +869,6 @@ void Heap::addCompiledCode(ExecutableBase* executable)
m_compiledCode.append(executable);
}
-void Heap::didStartVMShutdown()
-{
- m_activityCallback->didStartVMShutdown();
- m_activityCallback = 0;
- m_sweeper->didStartVMShutdown();
- m_sweeper = 0;
- lastChanceToFinalize();
-}
-
class Zombify : public MarkedBlock::VoidFunctor {
public:
void operator()(JSCell* cell)
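
One behavioural consequence of dropping generational collection (GGC) in the collect() hunk above: the allocation budget is now recomputed after every collection rather than only after full ones. The rule itself is unchanged: grow the target heap to the larger of a fixed minimum and a proportional factor of the live size (the context near the top of this file's diff shows a 1.25x return path in proportionalHeapSize()), then allow allocation of the headroom above the current size. A small sketch of that arithmetic; the 32 MB minimum is an assumed value for illustration, since the real minHeapSize() depends on heap type and installed RAM:

    #include <algorithm>
    #include <cstddef>
    #include <cstdio>

    // Assumed for illustration only; the real minHeapSize() and proportionalHeapSize()
    // depend on heap type and RAM size and have more cases than shown in this diff.
    static const std::size_t kAssumedMinHeapSize = 32 * 1024 * 1024;

    static std::size_t proportionalHeapSize(std::size_t liveSize)
    {
        return liveSize + liveSize / 4;   // integer approximation of the 1.25x path above
    }

    static std::size_t allocationBudgetAfterCollection(std::size_t currentHeapSize)
    {
        // Mirrors: maxHeapSize = max(minHeapSize, proportionalHeapSize(currentHeapSize));
        //          m_bytesAllocatedLimit = maxHeapSize - currentHeapSize;
        std::size_t maxHeapSize = std::max(kAssumedMinHeapSize, proportionalHeapSize(currentHeapSize));
        return maxHeapSize - currentHeapSize;
    }

    int main()
    {
        // Small live heap: the fixed minimum dominates, so the budget is generous (28 MB here).
        std::printf("4 MB live   -> budget %zu bytes\n", allocationBudgetAfterCollection(4u * 1024 * 1024));
        // Large live heap: the proportional rule dominates, budget is ~25% of live size (100 MB here).
        std::printf("400 MB live -> budget %zu bytes\n", allocationBudgetAfterCollection(400u * 1024 * 1024));
        return 0;
    }
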
diff --git a/Source/JavaScriptCore/heap/Heap.h b/Source/JavaScriptCore/heap/Heap.h
index 2df365643..8266f5fd5 100644
--- a/Source/JavaScriptCore/heap/Heap.h
+++ b/Source/JavaScriptCore/heap/Heap.h
@@ -55,7 +55,7 @@ namespace JSC {
class IncrementalSweeper;
class JITStubRoutine;
class JSCell;
- class JSGlobalData;
+ class VM;
class JSStack;
class JSValue;
class LiveObjectIterator;
@@ -96,16 +96,16 @@ namespace JSC {
static void writeBarrier(const JSCell*, JSCell*);
static uint8_t* addressOfCardFor(JSCell*);
- Heap(JSGlobalData*, HeapType);
+ Heap(VM*, HeapType);
~Heap();
JS_EXPORT_PRIVATE void lastChanceToFinalize();
- JSGlobalData* globalData() const { return m_globalData; }
+ VM* vm() const { return m_vm; }
MarkedSpace& objectSpace() { return m_objectSpace; }
MachineThreads& machineThreads() { return m_machineThreads; }
JS_EXPORT_PRIVATE GCActivityCallback* activityCallback();
- JS_EXPORT_PRIVATE void setActivityCallback(GCActivityCallback*);
+ JS_EXPORT_PRIVATE void setActivityCallback(PassOwnPtr<GCActivityCallback>);
JS_EXPORT_PRIVATE void setGarbageCollectionTimerEnabled(bool);
JS_EXPORT_PRIVATE IncrementalSweeper* sweeper();
@@ -113,7 +113,6 @@ namespace JSC {
// true if an allocation or collection is in progress
inline bool isBusy();
- MarkedAllocator& firstAllocatorWithoutDestructors() { return m_objectSpace.firstAllocator(); }
MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); }
MarkedAllocator& allocatorForObjectWithNormalDestructor(size_t bytes) { return m_objectSpace.normalDestructorAllocatorFor(bytes); }
MarkedAllocator& allocatorForObjectWithImmortalStructureDestructor(size_t bytes) { return m_objectSpace.immortalStructureDestructorAllocatorFor(bytes); }
@@ -151,8 +150,8 @@ namespace JSC {
JS_EXPORT_PRIVATE PassOwnPtr<TypeCountSet> objectTypeCounts();
void showStatistics();
- void pushTempSortVector(Vector<ValueStringPair>*);
- void popTempSortVector(Vector<ValueStringPair>*);
+ void pushTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>*);
+ void popTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>*);
HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = adoptPtr(new HashSet<MarkedArgumentBuffer*>); return *m_markListSet; }
@@ -162,6 +161,7 @@ namespace JSC {
HandleSet* handleSet() { return &m_handleSet; }
HandleStack* handleStack() { return &m_handleStack; }
+ void canonicalizeCellLivenessData();
void getConservativeRegisterRoots(HashSet<JSCell*>& roots);
double lastGCLength() { return m_lastGCLength; }
@@ -173,7 +173,6 @@ namespace JSC {
void didAbandon(size_t);
bool isPagedOut(double deadline);
- void didStartVMShutdown();
const JITStubRoutineSet& jitStubRoutines() { return m_jitStubRoutines; }
@@ -181,6 +180,7 @@ namespace JSC {
friend class CodeBlock;
friend class CopiedBlock;
friend class GCAwareJITStubRoutine;
+ friend class HandleSet;
friend class JITStubRoutine;
friend class LLIntOffsetsExtractor;
friend class MarkedSpace;
@@ -189,6 +189,7 @@ namespace JSC {
friend class CopiedSpace;
friend class CopyVisitor;
friend class SlotVisitor;
+ friend class SuperRegion;
friend class IncrementalSweeper;
friend class HeapStatistics;
friend class WeakSet;
@@ -209,7 +210,7 @@ namespace JSC {
JS_EXPORT_PRIVATE bool isValidAllocation(size_t);
JS_EXPORT_PRIVATE void reportExtraMemoryCostSlowCase(size_t);
- void markRoots(bool fullGC);
+ void markRoots();
void markProtectedObjects(HeapRootVisitor&);
void markTempSortVectors(HeapRootVisitor&);
void copyBackingStores();
@@ -241,7 +242,7 @@ namespace JSC {
#endif
ProtectCountSet m_protectedValues;
- Vector<Vector<ValueStringPair>* > m_tempSortingVectors;
+ Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>* > m_tempSortingVectors;
OwnPtr<HashSet<MarkedArgumentBuffer*> > m_markListSet;
MachineThreads m_machineThreads;
@@ -258,14 +259,14 @@ namespace JSC {
bool m_isSafeToCollect;
- JSGlobalData* m_globalData;
+ VM* m_vm;
double m_lastGCLength;
double m_lastCodeDiscardTime;
DoublyLinkedList<ExecutableBase> m_compiledCode;
- GCActivityCallback* m_activityCallback;
- IncrementalSweeper* m_sweeper;
+ OwnPtr<GCActivityCallback> m_activityCallback;
+ OwnPtr<IncrementalSweeper> m_sweeper;
Vector<MarkedBlock*> m_blockSnapshot;
};
@@ -286,11 +287,7 @@ namespace JSC {
{
if (Options::gcMaxHeapSize())
return m_bytesAllocated > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
-#if ENABLE(GGC)
- return m_objectSpace.nurseryWaterMark() >= m_minBytesPerCycle && m_isSafeToCollect && m_operationInProgress == NoOperation;
-#else
return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect && m_operationInProgress == NoOperation;
-#endif
}
bool Heap::isBusy()
@@ -332,37 +329,13 @@ namespace JSC {
inline bool Heap::isWriteBarrierEnabled()
{
-#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+#if ENABLE(WRITE_BARRIER_PROFILING)
return true;
#else
return false;
#endif
}
-#if ENABLE(GGC)
- inline uint8_t* Heap::addressOfCardFor(JSCell* cell)
- {
- return MarkedBlock::blockFor(cell)->addressOfCardFor(cell);
- }
-
- inline void Heap::writeBarrier(const JSCell* owner, JSCell*)
- {
- WriteBarrierCounters::countWriteBarrier();
- MarkedBlock* block = MarkedBlock::blockFor(owner);
- if (block->isMarked(owner))
- block->setDirtyObject(owner);
- }
-
- inline void Heap::writeBarrier(const JSCell* owner, JSValue value)
- {
- if (!value)
- return;
- if (!value.isCell())
- return;
- writeBarrier(owner, value.asCell());
- }
-#else
-
inline void Heap::writeBarrier(const JSCell*, JSCell*)
{
WriteBarrierCounters::countWriteBarrier();
@@ -372,7 +345,6 @@ namespace JSC {
{
WriteBarrierCounters::countWriteBarrier();
}
-#endif
inline void Heap::reportExtraMemoryCost(size_t cost)
{
diff --git a/Source/JavaScriptCore/heap/HeapBlock.h b/Source/JavaScriptCore/heap/HeapBlock.h
index 677eaacd4..6f2a74c08 100644
--- a/Source/JavaScriptCore/heap/HeapBlock.h
+++ b/Source/JavaScriptCore/heap/HeapBlock.h
@@ -45,7 +45,7 @@ template<typename T>
class HeapBlock : public DoublyLinkedListNode<T> {
friend CLASS_IF_GCC DoublyLinkedListNode<T>;
public:
- static HeapBlock* destroy(HeapBlock* block)
+ static HeapBlock* destroy(HeapBlock* block) WARN_UNUSED_RETURN
{
static_cast<T*>(block)->~T();
return block;
diff --git a/Source/JavaScriptCore/heap/HeapStatistics.cpp b/Source/JavaScriptCore/heap/HeapStatistics.cpp
index 2b98fe711..55e3e9d9d 100644
--- a/Source/JavaScriptCore/heap/HeapStatistics.cpp
+++ b/Source/JavaScriptCore/heap/HeapStatistics.cpp
@@ -28,6 +28,7 @@
#include "Heap.h"
#include "JSObject.h"
+#include "Operations.h"
#include "Options.h"
#include <stdlib.h>
#if OS(UNIX)
diff --git a/Source/JavaScriptCore/heap/HeapStatistics.h b/Source/JavaScriptCore/heap/HeapStatistics.h
index ce7a40a79..13a29efbe 100644
--- a/Source/JavaScriptCore/heap/HeapStatistics.h
+++ b/Source/JavaScriptCore/heap/HeapStatistics.h
@@ -26,6 +26,7 @@
#ifndef HeapStatistics_h
#define HeapStatistics_h
+#include "JSExportMacros.h"
#include <wtf/Deque.h>
namespace JSC {
diff --git a/Source/JavaScriptCore/heap/HeapTimer.cpp b/Source/JavaScriptCore/heap/HeapTimer.cpp
index d69ee9607..a30a28b45 100644
--- a/Source/JavaScriptCore/heap/HeapTimer.cpp
+++ b/Source/JavaScriptCore/heap/HeapTimer.cpp
@@ -38,6 +38,8 @@
#include <QMutexLocker>
#include <QThread>
#include <QTimerEvent>
+#elif PLATFORM(EFL)
+#include <Ecore.h>
#endif
namespace JSC {
@@ -46,70 +48,67 @@ namespace JSC {
const CFTimeInterval HeapTimer::s_decade = 60 * 60 * 24 * 365 * 10;
-HeapTimer::HeapTimer(JSGlobalData* globalData, CFRunLoopRef runLoop)
- : m_globalData(globalData)
- , m_runLoop(runLoop)
+static const void* retainAPILock(const void* info)
{
- memset(&m_context, 0, sizeof(CFRunLoopTimerContext));
- m_context.info = this;
- m_timer.adoptCF(CFRunLoopTimerCreate(0, s_decade, s_decade, 0, 0, HeapTimer::timerDidFire, &m_context));
- CFRunLoopAddTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
+ static_cast<JSLock*>(const_cast<void*>(info))->ref();
+ return info;
}
-HeapTimer::~HeapTimer()
+static void releaseAPILock(const void* info)
{
- CFRunLoopRemoveTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
- CFRunLoopTimerInvalidate(m_timer.get());
+ static_cast<JSLock*>(const_cast<void*>(info))->deref();
}
-void HeapTimer::synchronize()
+HeapTimer::HeapTimer(VM* vm, CFRunLoopRef runLoop)
+ : m_vm(vm)
+ , m_runLoop(runLoop)
{
- if (CFRunLoopGetCurrent() == m_runLoop.get())
- return;
- CFRunLoopRemoveTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
- m_runLoop = CFRunLoopGetCurrent();
+ memset(&m_context, 0, sizeof(CFRunLoopTimerContext));
+ m_context.info = &vm->apiLock();
+ m_context.retain = retainAPILock;
+ m_context.release = releaseAPILock;
+ m_timer = adoptCF(CFRunLoopTimerCreate(0, s_decade, s_decade, 0, 0, HeapTimer::timerDidFire, &m_context));
CFRunLoopAddTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
}
-void HeapTimer::invalidate()
+HeapTimer::~HeapTimer()
{
- m_globalData = 0;
- CFRunLoopTimerSetNextFireDate(m_timer.get(), CFAbsoluteTimeGetCurrent() - s_decade);
+ CFRunLoopRemoveTimer(m_runLoop.get(), m_timer.get(), kCFRunLoopCommonModes);
+ CFRunLoopTimerInvalidate(m_timer.get());
}
-void HeapTimer::didStartVMShutdown()
+void HeapTimer::timerDidFire(CFRunLoopTimerRef timer, void* context)
{
- if (CFRunLoopGetCurrent() == m_runLoop.get()) {
- invalidate();
- delete this;
- return;
- }
- ASSERT(!m_globalData->apiLock().currentThreadIsHoldingLock());
- MutexLocker locker(m_shutdownMutex);
- invalidate();
-}
+ JSLock* apiLock = static_cast<JSLock*>(context);
+ apiLock->lock();
-void HeapTimer::timerDidFire(CFRunLoopTimerRef, void* info)
-{
- HeapTimer* agent = static_cast<HeapTimer*>(info);
- agent->m_shutdownMutex.lock();
- if (!agent->m_globalData) {
- agent->m_shutdownMutex.unlock();
- delete agent;
+ VM* vm = apiLock->vm();
+ // The VM has been destroyed, so we should just give up.
+ if (!vm) {
+ apiLock->unlock();
return;
}
+
+ HeapTimer* heapTimer = 0;
+ if (vm->heap.activityCallback()->m_timer.get() == timer)
+ heapTimer = vm->heap.activityCallback();
+ else if (vm->heap.sweeper()->m_timer.get() == timer)
+ heapTimer = vm->heap.sweeper();
+ else
+ RELEASE_ASSERT_NOT_REACHED();
+
{
- // We don't ref here to prevent us from resurrecting the ref count of a "dead" JSGlobalData.
- APIEntryShim shim(agent->m_globalData, APIEntryShimWithoutLock::DontRefGlobalData);
- agent->doWork();
+ APIEntryShim shim(vm);
+ heapTimer->doWork();
}
- agent->m_shutdownMutex.unlock();
+
+ apiLock->unlock();
}
#elif PLATFORM(BLACKBERRY)
-HeapTimer::HeapTimer(JSGlobalData* globalData)
- : m_globalData(globalData)
+HeapTimer::HeapTimer(VM* vm)
+ : m_vm(vm)
, m_timer(this, &HeapTimer::timerDidFire)
{
// FIXME: Implement HeapTimer for other threads.
@@ -126,23 +125,14 @@ void HeapTimer::timerDidFire()
doWork();
}
-void HeapTimer::synchronize()
-{
-}
-
void HeapTimer::invalidate()
{
}
-void HeapTimer::didStartVMShutdown()
-{
- delete this;
-}
-
#elif PLATFORM(QT)
-HeapTimer::HeapTimer(JSGlobalData* globalData)
- : m_globalData(globalData)
+HeapTimer::HeapTimer(VM* vm)
+ : m_vm(vm)
, m_newThread(0)
, m_mutex(QMutex::NonRecursive)
{
@@ -153,6 +143,8 @@ HeapTimer::HeapTimer(JSGlobalData* globalData)
HeapTimer::~HeapTimer()
{
+ QMutexLocker lock(&m_mutex);
+ m_timer.stop();
}
void HeapTimer::timerEvent(QTimerEvent*)
@@ -163,7 +155,7 @@ void HeapTimer::timerEvent(QTimerEvent*)
return;
}
- APIEntryShim shim(m_globalData, APIEntryShimWithoutLock::DontRefGlobalData);
+ APIEntryShim shim(m_vm);
doWork();
}
@@ -175,49 +167,51 @@ void HeapTimer::customEvent(QEvent*)
m_newThread = 0;
}
-void HeapTimer::synchronize()
+#elif PLATFORM(EFL)
+
+HeapTimer::HeapTimer(VM* vm)
+ : m_vm(vm)
+ , m_timer(0)
{
- if (thread() != QThread::currentThread()) {
- // We can only move from the objects own thread to another, so we fire an
- // event into the owning thread to trigger the move.
- // This must be processed before any timerEvents so giving it high priority.
- QMutexLocker lock(&m_mutex);
- m_newThread = QThread::currentThread();
- QCoreApplication::postEvent(this, new QEvent(QEvent::User), Qt::HighEventPriority);
- }
}
-void HeapTimer::invalidate()
+HeapTimer::~HeapTimer()
{
- QMutexLocker lock(&m_mutex);
- m_timer.stop();
+ stop();
}
-void HeapTimer::didStartVMShutdown()
+Ecore_Timer* HeapTimer::add(double delay, void* agent)
{
- invalidate();
- if (thread() == QThread::currentThread())
- delete this;
- else
- deleteLater();
+ return ecore_timer_add(delay, reinterpret_cast<Ecore_Task_Cb>(timerEvent), agent);
}
-
-#else
-HeapTimer::HeapTimer(JSGlobalData* globalData)
- : m_globalData(globalData)
+
+void HeapTimer::stop()
{
+ if (!m_timer)
+ return;
+
+ ecore_timer_del(m_timer);
+ m_timer = 0;
}
-HeapTimer::~HeapTimer()
+bool HeapTimer::timerEvent(void* info)
{
+ HeapTimer* agent = static_cast<HeapTimer*>(info);
+
+ APIEntryShim shim(agent->m_vm);
+ agent->doWork();
+ agent->m_timer = 0;
+
+ return ECORE_CALLBACK_CANCEL;
}
-void HeapTimer::didStartVMShutdown()
+#else
+HeapTimer::HeapTimer(VM* vm)
+ : m_vm(vm)
{
- delete this;
}
-void HeapTimer::synchronize()
+HeapTimer::~HeapTimer()
{
}
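
The CF HeapTimer now parks the VM's API lock in the timer context, with retain/release callbacks keeping the lock alive after the VM is gone, and the fire callback bails out when the VM has been destroyed. A self-contained sketch of that refcounted-context pattern, with illustrative names standing in for JSLock and CFRunLoopTimerContext:

    #include <cstdio>

    struct Lock {
        int refCount = 1;
        void* vm = nullptr;                   // cleared when the VM is destroyed
        void ref() { ++refCount; }
        void deref() { if (!--refCount) delete this; }
    };

    struct TimerContext {
        void* info;
        const void* (*retain)(const void*);
        void (*release)(const void*);
    };

    static const void* retainLock(const void* info)
    {
        static_cast<Lock*>(const_cast<void*>(info))->ref();
        return info;
    }

    static void releaseLock(const void* info)
    {
        static_cast<Lock*>(const_cast<void*>(info))->deref();
    }

    // The fire callback only ever sees the lock; if the VM is already gone it
    // gives up instead of touching freed memory.
    static void timerDidFire(void* info)
    {
        Lock* lock = static_cast<Lock*>(info);
        if (!lock->vm) {
            std::puts("VM destroyed; giving up");
            return;
        }
        std::puts("doWork() under the API lock");
    }

    int main()
    {
        int fakeVM = 0;
        Lock* lock = new Lock;
        lock->vm = &fakeVM;                        // pretend a live VM exists
        TimerContext context = { lock, retainLock, releaseLock };
        context.retain(context.info);              // the timer retains the lock on creation
        lock->vm = nullptr;                        // simulate VM shutdown before the timer fires
        timerDidFire(context.info);                // prints "VM destroyed; giving up"
        context.release(context.info);             // timer invalidation drops its reference
        lock->deref();                             // the dying VM drops the last reference
    }
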
diff --git a/Source/JavaScriptCore/heap/HeapTimer.h b/Source/JavaScriptCore/heap/HeapTimer.h
index 66d28f228..f7576edd9 100644
--- a/Source/JavaScriptCore/heap/HeapTimer.h
+++ b/Source/JavaScriptCore/heap/HeapTimer.h
@@ -38,11 +38,13 @@
#include <QMutex>
#include <QObject>
#include <QThread>
+#elif PLATFORM(EFL)
+typedef struct _Ecore_Timer Ecore_Timer;
#endif
namespace JSC {
-class JSGlobalData;
+class VM;
#if PLATFORM(QT) && !USE(CF)
class HeapTimer : public QObject {
@@ -51,20 +53,17 @@ class HeapTimer {
#endif
public:
#if USE(CF)
- HeapTimer(JSGlobalData*, CFRunLoopRef);
+ HeapTimer(VM*, CFRunLoopRef);
static void timerDidFire(CFRunLoopTimerRef, void*);
#else
- HeapTimer(JSGlobalData*);
+ HeapTimer(VM*);
#endif
virtual ~HeapTimer();
-
- void didStartVMShutdown();
- virtual void synchronize();
virtual void doWork() = 0;
protected:
- JSGlobalData* m_globalData;
+ VM* m_vm;
#if USE(CF)
static const CFTimeInterval s_decade;
@@ -84,6 +83,11 @@ protected:
QBasicTimer m_timer;
QThread* m_newThread;
QMutex m_mutex;
+#elif PLATFORM(EFL)
+ static bool timerEvent(void*);
+ Ecore_Timer* add(double delay, void* agent);
+ void stop();
+ Ecore_Timer* m_timer;
#endif
private:
diff --git a/Source/JavaScriptCore/heap/IncrementalSweeper.cpp b/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
index 41bc7f5e4..038432a5d 100644
--- a/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
+++ b/Source/JavaScriptCore/heap/IncrementalSweeper.cpp
@@ -46,15 +46,15 @@ static const double sweepTimeMultiplier = 1.0 / sweepTimeTotal;
#if USE(CF)
IncrementalSweeper::IncrementalSweeper(Heap* heap, CFRunLoopRef runLoop)
- : HeapTimer(heap->globalData(), runLoop)
+ : HeapTimer(heap->vm(), runLoop)
, m_currentBlockToSweepIndex(0)
, m_blocksToSweep(heap->m_blockSnapshot)
{
}
-IncrementalSweeper* IncrementalSweeper::create(Heap* heap)
+PassOwnPtr<IncrementalSweeper> IncrementalSweeper::create(Heap* heap)
{
- return new IncrementalSweeper(heap, CFRunLoopGetCurrent());
+ return adoptPtr(new IncrementalSweeper(heap, CFRunLoopGetCurrent()));
}
void IncrementalSweeper::scheduleTimer()
@@ -70,15 +70,15 @@ void IncrementalSweeper::cancelTimer()
#elif PLATFORM(BLACKBERRY) || PLATFORM(QT)
IncrementalSweeper::IncrementalSweeper(Heap* heap)
- : HeapTimer(heap->globalData())
+ : HeapTimer(heap->vm())
, m_currentBlockToSweepIndex(0)
, m_blocksToSweep(heap->m_blockSnapshot)
{
}
-IncrementalSweeper* IncrementalSweeper::create(Heap* heap)
+PassOwnPtr<IncrementalSweeper> IncrementalSweeper::create(Heap* heap)
{
- return new IncrementalSweeper(heap);
+ return adoptPtr(new IncrementalSweeper(heap));
}
void IncrementalSweeper::scheduleTimer()
@@ -128,7 +128,7 @@ void IncrementalSweeper::sweepNextBlock()
continue;
block->sweep();
- m_globalData->heap.objectSpace().freeOrShrinkBlock(block);
+ m_vm->heap.objectSpace().freeOrShrinkBlock(block);
return;
}
}
@@ -144,14 +144,14 @@ void IncrementalSweeper::willFinishSweeping()
{
m_currentBlockToSweepIndex = 0;
m_blocksToSweep.clear();
- if (m_globalData)
+ if (m_vm)
cancelTimer();
}
#else
-IncrementalSweeper::IncrementalSweeper(JSGlobalData* globalData)
- : HeapTimer(globalData)
+IncrementalSweeper::IncrementalSweeper(VM* vm)
+ : HeapTimer(vm)
{
}
@@ -159,9 +159,9 @@ void IncrementalSweeper::doWork()
{
}
-IncrementalSweeper* IncrementalSweeper::create(Heap* heap)
+PassOwnPtr<IncrementalSweeper> IncrementalSweeper::create(Heap* heap)
{
- return new IncrementalSweeper(heap->globalData());
+ return adoptPtr(new IncrementalSweeper(heap->vm()));
}
void IncrementalSweeper::startSweeping(Vector<MarkedBlock*>&)
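
IncrementalSweeper::create() now returns a PassOwnPtr built with adoptPtr, so the Heap clearly owns and frees the sweeper instead of receiving a raw pointer. A rough modern-C++ analogue of the same ownership-returning factory, using std::unique_ptr purely as a stand-in for WTF's PassOwnPtr:

    #include <memory>

    class Sweeper {
    public:
        static std::unique_ptr<Sweeper> create() { return std::unique_ptr<Sweeper>(new Sweeper); }
        void doWork() {}
    private:
        Sweeper() = default;                      // only create() can make one
    };

    struct Heap {
        std::unique_ptr<Sweeper> m_sweeper = Sweeper::create();  // Heap owns and frees it
    };

    int main()
    {
        Heap heap;
        heap.m_sweeper->doWork();                 // freed automatically when Heap goes away
    }
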
diff --git a/Source/JavaScriptCore/heap/IncrementalSweeper.h b/Source/JavaScriptCore/heap/IncrementalSweeper.h
index 7b0ae99ab..1c6a3786d 100644
--- a/Source/JavaScriptCore/heap/IncrementalSweeper.h
+++ b/Source/JavaScriptCore/heap/IncrementalSweeper.h
@@ -39,7 +39,7 @@ class Heap;
class IncrementalSweeper : public HeapTimer {
public:
- static IncrementalSweeper* create(Heap*);
+ static PassOwnPtr<IncrementalSweeper> create(Heap*);
void startSweeping(Vector<MarkedBlock*>&);
virtual void doWork();
void sweepNextBlock();
@@ -61,7 +61,7 @@ private:
Vector<MarkedBlock*>& m_blocksToSweep;
#else
- IncrementalSweeper(JSGlobalData*);
+ IncrementalSweeper(VM*);
#endif
};
diff --git a/Source/JavaScriptCore/heap/Local.h b/Source/JavaScriptCore/heap/Local.h
index 5d1f06439..d23435989 100644
--- a/Source/JavaScriptCore/heap/Local.h
+++ b/Source/JavaScriptCore/heap/Local.h
@@ -27,7 +27,7 @@
#define Local_h
#include "Handle.h"
-#include "JSGlobalData.h"
+#include "VM.h"
/*
A strongly referenced handle whose lifetime is temporary, limited to a given
@@ -44,8 +44,8 @@ template <typename T> class Local : public Handle<T> {
public:
typedef typename Handle<T>::ExternalType ExternalType;
- Local(JSGlobalData&, ExternalType = ExternalType());
- Local(JSGlobalData&, Handle<T>);
+ Local(VM&, ExternalType = ExternalType());
+ Local(VM&, Handle<T>);
Local(const Local<T>&); // Adopting constructor. Used to return a Local to a calling function.
Local& operator=(ExternalType);
@@ -56,14 +56,14 @@ private:
void set(ExternalType);
};
-template <typename T> inline Local<T>::Local(JSGlobalData& globalData, ExternalType value)
- : Handle<T>(globalData.heap.handleStack()->push())
+template <typename T> inline Local<T>::Local(VM& vm, ExternalType value)
+ : Handle<T>(vm.heap.handleStack()->push())
{
set(value);
}
-template <typename T> inline Local<T>::Local(JSGlobalData& globalData, Handle<T> other)
- : Handle<T>(globalData.heap.handleStack()->push())
+template <typename T> inline Local<T>::Local(VM& vm, Handle<T> other)
+ : Handle<T>(vm.heap.handleStack()->push())
{
set(other.get());
}
@@ -101,8 +101,8 @@ template <typename T> inline void Local<T>::set(ExternalType externalType)
template <typename T, unsigned inlineCapacity = 0> class LocalStack {
typedef typename Handle<T>::ExternalType ExternalType;
public:
- LocalStack(JSGlobalData& globalData)
- : m_globalData(globalData)
+ LocalStack(VM& vm)
+ : m_vm(vm)
, m_count(0)
{
}
@@ -122,7 +122,7 @@ public:
void push(ExternalType value)
{
if (m_count == m_stack.size())
- m_stack.append(Local<T>(m_globalData, value));
+ m_stack.append(Local<T>(m_vm, value));
else
m_stack[m_count] = value;
m_count++;
@@ -132,7 +132,7 @@ public:
unsigned size() const { return m_count; }
private:
- JSGlobalData& m_globalData;
+ VM& m_vm;
Vector<Local<T>, inlineCapacity> m_stack;
unsigned m_count;
};
diff --git a/Source/JavaScriptCore/heap/LocalScope.h b/Source/JavaScriptCore/heap/LocalScope.h
index cd27b32ef..ab896863c 100644
--- a/Source/JavaScriptCore/heap/LocalScope.h
+++ b/Source/JavaScriptCore/heap/LocalScope.h
@@ -37,11 +37,11 @@ namespace JSC {
LocalScope is similar in concept to NSAutoreleasePool.
*/
-class JSGlobalData;
+class VM;
class LocalScope {
public:
- explicit LocalScope(JSGlobalData&);
+ explicit LocalScope(VM&);
~LocalScope();
template <typename T> Local<T> release(Local<T>); // Destroys all other locals in the scope.
@@ -51,8 +51,8 @@ private:
HandleStack::Frame m_lastFrame;
};
-inline LocalScope::LocalScope(JSGlobalData& globalData)
- : m_handleStack(globalData.heap.handleStack())
+inline LocalScope::LocalScope(VM& vm)
+ : m_handleStack(vm.heap.handleStack())
{
m_handleStack->enterScope(m_lastFrame);
}
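
Local and LocalScope keep their push-a-slot / drop-the-whole-frame behaviour; only the JSGlobalData parameter becomes VM. For readers new to the pattern, a self-contained sketch of a scope-managed handle stack (illustrative types, not the real HandleStack):

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct HandleStack {
        std::vector<void*> slots;
        size_t enterScope() { return slots.size(); }              // remember the current frame
        void leaveScope(size_t frame) { slots.resize(frame); }    // drop everything pushed since
        void** push() { slots.push_back(nullptr); return &slots.back(); }
    };

    class LocalScope {
    public:
        explicit LocalScope(HandleStack& stack)
            : m_stack(stack), m_frame(stack.enterScope()) { }
        ~LocalScope() { m_stack.leaveScope(m_frame); }            // free the whole frame at once
    private:
        HandleStack& m_stack;
        size_t m_frame;
    };

    int main()
    {
        HandleStack stack;
        {
            LocalScope scope(stack);
            *stack.push() = nullptr;                              // two temporary "Local" handles
            *stack.push() = nullptr;
            assert(stack.slots.size() == 2);
        }
        assert(stack.slots.empty());                              // released when the scope ended
    }
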
diff --git a/Source/JavaScriptCore/heap/MachineStackMarker.cpp b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
index 613edd08c..78fdfa496 100644
--- a/Source/JavaScriptCore/heap/MachineStackMarker.cpp
+++ b/Source/JavaScriptCore/heap/MachineStackMarker.cpp
@@ -25,7 +25,7 @@
#include "ConservativeRoots.h"
#include "Heap.h"
#include "JSArray.h"
-#include "JSGlobalData.h"
+#include "VM.h"
#include <setjmp.h>
#include <stdlib.h>
#include <wtf/StdLibExtras.h>
@@ -188,7 +188,7 @@ void MachineThreads::makeUsableFromMultipleThreads()
void MachineThreads::addCurrentThread()
{
- ASSERT(!m_heap->globalData()->exclusiveThread || m_heap->globalData()->exclusiveThread == currentThread());
+ ASSERT(!m_heap->vm()->exclusiveThread || m_heap->vm()->exclusiveThread == currentThread());
if (!m_threadSpecific || threadSpecificGet(m_threadSpecific))
return;
diff --git a/Source/JavaScriptCore/heap/MarkedAllocator.cpp b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
index 466c9fffe..cbdbfd532 100644
--- a/Source/JavaScriptCore/heap/MarkedAllocator.cpp
+++ b/Source/JavaScriptCore/heap/MarkedAllocator.cpp
@@ -4,7 +4,7 @@
#include "GCActivityCallback.h"
#include "Heap.h"
#include "IncrementalSweeper.h"
-#include "JSGlobalData.h"
+#include "VM.h"
#include <wtf/CurrentTime.h>
namespace JSC {
@@ -70,7 +70,7 @@ inline void* MarkedAllocator::tryAllocate(size_t bytes)
void* MarkedAllocator::allocateSlowCase(size_t bytes)
{
- ASSERT(m_heap->globalData()->apiLock().currentThreadIsHoldingLock());
+ ASSERT(m_heap->vm()->apiLock().currentThreadIsHoldingLock());
#if COLLECT_ON_EVERY_ALLOCATION
m_heap->collectAllGarbage();
ASSERT(m_heap->m_operationInProgress == NoOperation);
diff --git a/Source/JavaScriptCore/heap/MarkedAllocator.h b/Source/JavaScriptCore/heap/MarkedAllocator.h
index 867481fe3..686691433 100644
--- a/Source/JavaScriptCore/heap/MarkedAllocator.h
+++ b/Source/JavaScriptCore/heap/MarkedAllocator.h
@@ -15,10 +15,11 @@ class SpeculativeJIT;
}
class MarkedAllocator {
- friend class JIT;
- friend class DFG::SpeculativeJIT;
+ friend class LLIntOffsetsExtractor;
public:
+ static ptrdiff_t offsetOfFreeListHead();
+
MarkedAllocator();
void reset();
void canonicalizeCellLivenessData();
@@ -36,8 +37,6 @@ public:
bool isPagedOut(double deadline);
private:
- friend class LLIntOffsetsExtractor;
-
JS_EXPORT_PRIVATE void* allocateSlowCase(size_t);
void* tryAllocate(size_t);
void* tryAllocateHelper(size_t);
@@ -53,6 +52,11 @@ private:
MarkedSpace* m_markedSpace;
};
+inline ptrdiff_t MarkedAllocator::offsetOfFreeListHead()
+{
+ return OBJECT_OFFSETOF(MarkedAllocator, m_freeList) + OBJECT_OFFSETOF(MarkedBlock::FreeList, head);
+}
+
inline MarkedAllocator::MarkedAllocator()
: m_currentBlock(0)
, m_blocksToSweep(0)
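
MarkedAllocator::offsetOfFreeListHead() exists so the JIT can emit a direct load of the free-list head at a fixed byte offset rather than calling into C++. A standalone sketch of computing such an offset with plain offsetof (illustrative types):

    #include <cstddef>
    #include <cstdio>

    struct FreeList {
        void* head;
        unsigned bytes;
    };

    struct Allocator {
        FreeList m_freeList;
        void* m_currentBlock;
    };

    int main()
    {
        // The JIT bakes this constant into generated code, e.g.
        //   load reg, [allocatorPointer + offset]   ; reg = m_freeList.head
        std::printf("free-list head lives at byte offset %zu\n",
                    offsetof(Allocator, m_freeList) + offsetof(FreeList, head));
    }
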
diff --git a/Source/JavaScriptCore/heap/MarkedBlock.cpp b/Source/JavaScriptCore/heap/MarkedBlock.cpp
index 9a036f87c..0df2e1fb8 100644
--- a/Source/JavaScriptCore/heap/MarkedBlock.cpp
+++ b/Source/JavaScriptCore/heap/MarkedBlock.cpp
@@ -29,12 +29,13 @@
#include "IncrementalSweeper.h"
#include "JSCell.h"
#include "JSDestructibleObject.h"
-
+#include "Operations.h"
namespace JSC {
MarkedBlock* MarkedBlock::create(DeadBlock* block, MarkedAllocator* allocator, size_t cellSize, DestructorType destructorType)
{
+ ASSERT(reinterpret_cast<size_t>(block) == (reinterpret_cast<size_t>(block) & blockMask));
Region* region = block->region();
return new (NotNull, block) MarkedBlock(region, allocator, cellSize, destructorType);
}
@@ -46,7 +47,7 @@ MarkedBlock::MarkedBlock(Region* region, MarkedAllocator* allocator, size_t cell
, m_destructorType(destructorType)
, m_allocator(allocator)
, m_state(New) // All cells start out unmarked.
- , m_weakSet(allocator->heap()->globalData())
+ , m_weakSet(allocator->heap()->vm())
{
ASSERT(allocator);
HEAP_LOG_BLOCK_STATE_TRANSITION(this);
@@ -62,7 +63,7 @@ inline void MarkedBlock::callDestructor(JSCell* cell)
m_heap->m_destroyedTypeCounts.countVPtr(vptr);
#endif
- cell->methodTable()->destroy(cell);
+ cell->methodTableForDestruction()->destroy(cell);
cell->zap();
}
@@ -131,7 +132,7 @@ MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode)
ASSERT(sweepMode == SweepToFreeList);
return FreeList();
case Allocated:
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
return FreeList();
case Marked:
return sweepMode == SweepToFreeList
@@ -139,7 +140,7 @@ MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode)
: specializedSweep<Marked, SweepOnly, dtorType>();
}
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
return FreeList();
}
diff --git a/Source/JavaScriptCore/heap/MarkedBlock.h b/Source/JavaScriptCore/heap/MarkedBlock.h
index 9080aaef4..fcc3016d9 100644
--- a/Source/JavaScriptCore/heap/MarkedBlock.h
+++ b/Source/JavaScriptCore/heap/MarkedBlock.h
@@ -23,7 +23,6 @@
#define MarkedBlock_h
#include "BlockAllocator.h"
-#include "CardSet.h"
#include "HeapBlock.h"
#include "WeakSet.h"
@@ -71,19 +70,12 @@ namespace JSC {
class MarkedBlock : public HeapBlock<MarkedBlock> {
public:
- // Ensure natural alignment for native types whilst recognizing that the smallest
- // object the heap will commonly allocate is four words.
- static const size_t atomSize = 4 * sizeof(void*);
- static const size_t atomShift = 5;
+ static const size_t atomSize = 8; // bytes
static const size_t blockSize = 64 * KB;
static const size_t blockMask = ~(blockSize - 1); // blockSize must be a power of two.
- static const size_t atomsPerBlock = blockSize / atomSize; // ~0.4% overhead
+ static const size_t atomsPerBlock = blockSize / atomSize;
static const size_t atomMask = atomsPerBlock - 1;
- static const int cardShift = 8; // This is log2 of bytes per card.
- static const size_t bytesPerCard = 1 << cardShift;
- static const int cardCount = blockSize / bytesPerCard;
- static const int cardMask = cardCount - 1;
struct FreeCell {
FreeCell* next;
@@ -125,7 +117,7 @@ namespace JSC {
MarkedAllocator* allocator() const;
Heap* heap() const;
- JSGlobalData* globalData() const;
+ VM* vm() const;
WeakSet& weakSet();
enum SweepMode { SweepOnly, SweepToFreeList };
@@ -165,34 +157,6 @@ namespace JSC {
bool needsSweeping();
-#if ENABLE(GGC)
- void setDirtyObject(const void* atom)
- {
- ASSERT(MarkedBlock::blockFor(atom) == this);
- m_cards.markCardForAtom(atom);
- }
-
- uint8_t* addressOfCardFor(const void* atom)
- {
- ASSERT(MarkedBlock::blockFor(atom) == this);
- return &m_cards.cardForAtom(atom);
- }
-
- static inline size_t offsetOfCards()
- {
- return OBJECT_OFFSETOF(MarkedBlock, m_cards);
- }
-
- static inline size_t offsetOfMarks()
- {
- return OBJECT_OFFSETOF(MarkedBlock, m_marks);
- }
-
- typedef Vector<JSCell*, 32> DirtyCellVector;
- inline void gatherDirtyCells(DirtyCellVector&);
- template <int size> inline void gatherDirtyCellsWithSize(DirtyCellVector&);
-#endif
-
template <typename Functor> void forEachCell(Functor&);
template <typename Functor> void forEachLiveCell(Functor&);
template <typename Functor> void forEachDeadCell(Functor&);
@@ -211,10 +175,6 @@ namespace JSC {
void callDestructor(JSCell*);
template<BlockState, SweepMode, DestructorType> FreeList specializedSweep();
-#if ENABLE(GGC)
- CardSet<bytesPerCard, blockSize> m_cards;
-#endif
-
size_t m_atomsPerCell;
size_t m_endAtom; // This is a fuzzy end. Always test for < m_endAtom.
#if ENABLE(PARALLEL_GC)
@@ -280,9 +240,9 @@ namespace JSC {
return m_weakSet.heap();
}
- inline JSGlobalData* MarkedBlock::globalData() const
+ inline VM* MarkedBlock::vm() const
{
- return m_weakSet.globalData();
+ return m_weakSet.vm();
}
inline WeakSet& MarkedBlock::weakSet()
@@ -408,11 +368,11 @@ namespace JSC {
case New:
case FreeListed:
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
return false;
}
- ASSERT_NOT_REACHED();
+ RELEASE_ASSERT_NOT_REACHED();
return false;
}
@@ -466,87 +426,6 @@ namespace JSC {
return m_state == Marked;
}
-#if ENABLE(GGC)
-template <int _cellSize> void MarkedBlock::gatherDirtyCellsWithSize(DirtyCellVector& dirtyCells)
-{
- if (m_cards.testAndClear(0)) {
- char* ptr = reinterpret_cast<char*>(&atoms()[firstAtom()]);
- const char* end = reinterpret_cast<char*>(this) + bytesPerCard;
- while (ptr < end) {
- JSCell* cell = reinterpret_cast<JSCell*>(ptr);
- if (isMarked(cell))
- dirtyCells.append(cell);
- ptr += _cellSize;
- }
- }
-
- const size_t cellOffset = firstAtom() * atomSize % _cellSize;
- for (size_t i = 1; i < m_cards.cardCount; i++) {
- if (!m_cards.testAndClear(i))
- continue;
- char* ptr = reinterpret_cast<char*>(this) + i * bytesPerCard + cellOffset;
- char* end = reinterpret_cast<char*>(this) + (i + 1) * bytesPerCard;
-
- while (ptr < end) {
- JSCell* cell = reinterpret_cast<JSCell*>(ptr);
- if (isMarked(cell))
- dirtyCells.append(cell);
- ptr += _cellSize;
- }
- }
-}
-
-void MarkedBlock::gatherDirtyCells(DirtyCellVector& dirtyCells)
-{
- COMPILE_ASSERT((int)m_cards.cardCount == (int)cardCount, MarkedBlockCardCountsMatch);
-
- ASSERT(m_state != New && m_state != FreeListed);
-
- // This is an optimisation to avoid having to walk the set of marked
- // blocks twice during GC.
- m_state = Marked;
-
- if (isEmpty())
- return;
-
- size_t cellSize = this->cellSize();
- if (cellSize == 32) {
- gatherDirtyCellsWithSize<32>(dirtyCells);
- return;
- }
- if (cellSize == 64) {
- gatherDirtyCellsWithSize<64>(dirtyCells);
- return;
- }
-
- const size_t firstCellOffset = firstAtom() * atomSize % cellSize;
-
- if (m_cards.testAndClear(0)) {
- char* ptr = reinterpret_cast<char*>(this) + firstAtom() * atomSize;
- char* end = reinterpret_cast<char*>(this) + bytesPerCard;
- while (ptr < end) {
- JSCell* cell = reinterpret_cast<JSCell*>(ptr);
- if (isMarked(cell))
- dirtyCells.append(cell);
- ptr += cellSize;
- }
- }
- for (size_t i = 1; i < m_cards.cardCount; i++) {
- if (!m_cards.testAndClear(i))
- continue;
- char* ptr = reinterpret_cast<char*>(this) + firstCellOffset + cellSize * ((i * bytesPerCard + cellSize - 1 - firstCellOffset) / cellSize);
- char* end = reinterpret_cast<char*>(this) + std::min((i + 1) * bytesPerCard, m_endAtom * atomSize);
-
- while (ptr < end) {
- JSCell* cell = reinterpret_cast<JSCell*>(ptr);
- if (isMarked(cell))
- dirtyCells.append(cell);
- ptr += cellSize;
- }
- }
-}
-#endif
-
} // namespace JSC
namespace WTF {
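
The MarkedBlock hunk fixes the atom size at 8 bytes, so a 64KB block holds 8192 atoms and per-cell bookkeeping is indexed by atom number. A quick standalone sketch of that index math (hypothetical helper, not JSC code):

    #include <cstddef>
    #include <cstdio>

    static const size_t atomSize = 8;                          // bytes per atom
    static const size_t blockSize = 64 * 1024;                 // bytes per block
    static const size_t atomsPerBlock = blockSize / atomSize;  // 8192 atoms to track per block

    // A cell's position in the per-block bitmaps is its offset in atoms.
    inline size_t atomNumber(const void* block, const void* cell)
    {
        return (static_cast<const char*>(cell) - static_cast<const char*>(block)) / atomSize;
    }

    int main()
    {
        static char block[blockSize];
        std::printf("atoms per block: %zu\n", atomsPerBlock);
        std::printf("cell at byte offset 4096 is atom %zu\n", atomNumber(block, block + 4096));
    }
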
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.cpp b/Source/JavaScriptCore/heap/MarkedSpace.cpp
index 50634dd23..2bef60843 100644
--- a/Source/JavaScriptCore/heap/MarkedSpace.cpp
+++ b/Source/JavaScriptCore/heap/MarkedSpace.cpp
@@ -223,35 +223,4 @@ void MarkedSpace::shrink()
forEachBlock(freeOrShrink);
}
-#if ENABLE(GGC)
-class GatherDirtyCells {
- WTF_MAKE_NONCOPYABLE(GatherDirtyCells);
-public:
- typedef void* ReturnType;
-
- explicit GatherDirtyCells(MarkedBlock::DirtyCellVector*);
- void operator()(MarkedBlock*);
- ReturnType returnValue() { return 0; }
-
-private:
- MarkedBlock::DirtyCellVector* m_dirtyCells;
-};
-
-inline GatherDirtyCells::GatherDirtyCells(MarkedBlock::DirtyCellVector* dirtyCells)
- : m_dirtyCells(dirtyCells)
-{
-}
-
-inline void GatherDirtyCells::operator()(MarkedBlock* block)
-{
- block->gatherDirtyCells(*m_dirtyCells);
-}
-
-void MarkedSpace::gatherDirtyCells(MarkedBlock::DirtyCellVector& dirtyCells)
-{
- GatherDirtyCells gatherDirtyCells(&dirtyCells);
- forEachBlock(gatherDirtyCells);
-}
-#endif
-
} // namespace JSC
diff --git a/Source/JavaScriptCore/heap/MarkedSpace.h b/Source/JavaScriptCore/heap/MarkedSpace.h
index 214536ad7..278f1cc98 100644
--- a/Source/JavaScriptCore/heap/MarkedSpace.h
+++ b/Source/JavaScriptCore/heap/MarkedSpace.h
@@ -114,9 +114,9 @@ public:
private:
friend class LLIntOffsetsExtractor;
- // [ 32... 512 ]
+ // [ 32... 128 ]
static const size_t preciseStep = MarkedBlock::atomSize;
- static const size_t preciseCutoff = 512;
+ static const size_t preciseCutoff = 128;
static const size_t preciseCount = preciseCutoff / preciseStep;
// [ 1024... blockSize ]
@@ -170,11 +170,6 @@ template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forE
return forEachDeadCell(functor);
}
-inline MarkedAllocator& MarkedSpace::firstAllocator()
-{
- return m_normalSpace.preciseAllocators[0];
-}
-
inline MarkedAllocator& MarkedSpace::allocatorFor(size_t bytes)
{
ASSERT(bytes);
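
The precise size classes now stop at 128 bytes, with one allocator per atomSize step. A standalone sketch of how a request size could map to a precise-allocator index under those constants (the indexing here is illustrative, not copied from MarkedSpace):

    #include <cstddef>
    #include <cstdio>

    static const size_t atomSize = 8;
    static const size_t preciseStep = atomSize;
    static const size_t preciseCutoff = 128;

    inline size_t preciseIndex(size_t bytes)
    {
        // Round the request up to the next step, then convert to a 0-based index.
        return (bytes + preciseStep - 1) / preciseStep - 1;
    }

    int main()
    {
        const size_t sizes[] = { 16, 33, 128 };
        for (size_t bytes : sizes) {
            if (bytes <= preciseCutoff)
                std::printf("%zu bytes -> precise allocator #%zu\n", bytes, preciseIndex(bytes));
        }
    }
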
diff --git a/Source/JavaScriptCore/heap/PassWeak.h b/Source/JavaScriptCore/heap/PassWeak.h
index acd6e52c7..506a63970 100644
--- a/Source/JavaScriptCore/heap/PassWeak.h
+++ b/Source/JavaScriptCore/heap/PassWeak.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -34,29 +34,14 @@
namespace JSC {
-template<typename T> class Weak;
template<typename T> class PassWeak;
template<typename T> PassWeak<T> adoptWeak(WeakImpl*);
-template<typename Base, typename T> class WeakImplAccessor {
+template<typename T> class PassWeak {
public:
- typedef T* GetType;
-
- T* operator->() const;
- T& operator*() const;
- GetType get() const;
-
- bool was(GetType) const;
-};
-
-template<typename T> class PassWeak : public WeakImplAccessor<PassWeak<T>, T> {
-public:
- friend class WeakImplAccessor<PassWeak<T>, T>;
- typedef typename WeakImplAccessor<PassWeak<T>, T>::GetType GetType;
-
PassWeak();
PassWeak(std::nullptr_t);
- PassWeak(GetType, WeakHandleOwner* = 0, void* context = 0);
+ PassWeak(T*, WeakHandleOwner* = 0, void* context = 0);
// It somewhat breaks the type system to allow transfer of ownership out of
// a const PassWeak. However, it makes it much easier to work with PassWeak
@@ -66,10 +51,14 @@ public:
~PassWeak();
+ T* operator->() const;
+ T& operator*() const;
+ T* get() const;
+
bool operator!() const;
// This conversion operator allows implicit conversion to bool but not to other integer types.
- typedef JSValue (PassWeak::*UnspecifiedBoolType);
+ typedef void* (PassWeak::*UnspecifiedBoolType);
operator UnspecifiedBoolType*() const;
WeakImpl* leakImpl() const WARN_UNUSED_RETURN;
@@ -81,30 +70,6 @@ private:
WeakImpl* m_impl;
};
-template<typename Base, typename T> inline T* WeakImplAccessor<Base, T>::operator->() const
-{
- ASSERT(static_cast<const Base*>(this)->m_impl && static_cast<const Base*>(this)->m_impl->state() == WeakImpl::Live);
- return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
-}
-
-template<typename Base, typename T> inline T& WeakImplAccessor<Base, T>::operator*() const
-{
- ASSERT(static_cast<const Base*>(this)->m_impl && static_cast<const Base*>(this)->m_impl->state() == WeakImpl::Live);
- return *jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
-}
-
-template<typename Base, typename T> inline typename WeakImplAccessor<Base, T>::GetType WeakImplAccessor<Base, T>::get() const
-{
- if (!static_cast<const Base*>(this)->m_impl || static_cast<const Base*>(this)->m_impl->state() != WeakImpl::Live)
- return GetType();
- return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell());
-}
-
-template<typename Base, typename T> inline bool WeakImplAccessor<Base, T>::was(typename WeakImplAccessor<Base, T>::GetType other) const
-{
- return jsCast<T*>(static_cast<const Base*>(this)->m_impl->jsValue().asCell()) == other;
-}
-
template<typename T> inline PassWeak<T>::PassWeak()
: m_impl(0)
{
@@ -115,8 +80,8 @@ template<typename T> inline PassWeak<T>::PassWeak(std::nullptr_t)
{
}
-template<typename T> inline PassWeak<T>::PassWeak(typename PassWeak<T>::GetType getType, WeakHandleOwner* weakOwner, void* context)
- : m_impl(getType ? WeakSet::allocate(getType, weakOwner, context) : 0)
+template<typename T> inline PassWeak<T>::PassWeak(T* cell, WeakHandleOwner* weakOwner, void* context)
+ : m_impl(cell ? WeakSet::allocate(cell, weakOwner, context) : 0)
{
}
@@ -137,6 +102,25 @@ template<typename T> inline PassWeak<T>::~PassWeak()
WeakSet::deallocate(m_impl);
}
+template<typename T> inline T* PassWeak<T>::operator->() const
+{
+ ASSERT(m_impl && m_impl->state() == WeakImpl::Live);
+ return jsCast<T*>(m_impl->jsValue().asCell());
+}
+
+template<typename T> inline T& PassWeak<T>::operator*() const
+{
+ ASSERT(m_impl && m_impl->state() == WeakImpl::Live);
+ return *jsCast<T*>(m_impl->jsValue().asCell());
+}
+
+template<typename T> inline T* PassWeak<T>::get() const
+{
+ if (!m_impl || m_impl->state() != WeakImpl::Live)
+ return 0;
+ return jsCast<T*>(m_impl->jsValue().asCell());
+}
+
template<typename T> inline bool PassWeak<T>::operator!() const
{
return !m_impl || m_impl->state() != WeakImpl::Live || !m_impl->jsValue();
diff --git a/Source/JavaScriptCore/heap/Region.h b/Source/JavaScriptCore/heap/Region.h
new file mode 100644
index 000000000..366f25643
--- /dev/null
+++ b/Source/JavaScriptCore/heap/Region.h
@@ -0,0 +1,319 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSC_Region_h
+#define JSC_Region_h
+
+#include "HeapBlock.h"
+#include "SuperRegion.h"
+#include <wtf/DoublyLinkedList.h>
+#include <wtf/MetaAllocatorHandle.h>
+#include <wtf/PageAllocationAligned.h>
+
+#define HEAP_MEMORY_ID reinterpret_cast<void*>(static_cast<intptr_t>(-3))
+
+#define ENABLE_SUPER_REGION 0
+
+#ifndef ENABLE_SUPER_REGION
+#if USE(JSVALUE64)
+#define ENABLE_SUPER_REGION 1
+#else
+#define ENABLE_SUPER_REGION 0
+#endif
+#endif
+
+namespace JSC {
+
+class DeadBlock : public HeapBlock<DeadBlock> {
+public:
+ DeadBlock(Region*);
+};
+
+inline DeadBlock::DeadBlock(Region* region)
+ : HeapBlock<DeadBlock>(region)
+{
+}
+
+class Region : public DoublyLinkedListNode<Region> {
+ friend CLASS_IF_GCC DoublyLinkedListNode<Region>;
+ friend class BlockAllocator;
+public:
+ ~Region();
+ static Region* create(SuperRegion*, size_t blockSize);
+ static Region* createCustomSize(SuperRegion*, size_t blockSize, size_t blockAlignment);
+ Region* reset(size_t blockSize);
+ void destroy();
+
+ size_t blockSize() const { return m_blockSize; }
+ bool isFull() const { return m_blocksInUse == m_totalBlocks; }
+ bool isEmpty() const { return !m_blocksInUse; }
+ bool isCustomSize() const { return m_isCustomSize; }
+
+ DeadBlock* allocate();
+ void deallocate(void*);
+
+ static const size_t s_regionSize = 64 * KB;
+ static const size_t s_regionMask = ~(s_regionSize - 1);
+
+protected:
+ Region(size_t blockSize, size_t totalBlocks, bool isExcess);
+ void initializeBlockList();
+
+ bool m_isExcess;
+
+private:
+ void* base();
+ size_t size();
+
+ size_t m_totalBlocks;
+ size_t m_blocksInUse;
+ size_t m_blockSize;
+ bool m_isCustomSize;
+ Region* m_prev;
+ Region* m_next;
+ DoublyLinkedList<DeadBlock> m_deadBlocks;
+};
+
+
+class NormalRegion : public Region {
+ friend class Region;
+private:
+ NormalRegion(PassRefPtr<WTF::MetaAllocatorHandle>, size_t blockSize, size_t totalBlocks);
+
+ static NormalRegion* tryCreate(SuperRegion*, size_t blockSize);
+ static NormalRegion* tryCreateCustomSize(SuperRegion*, size_t blockSize, size_t blockAlignment);
+
+ void* base() { return m_allocation->start(); }
+ size_t size() { return m_allocation->sizeInBytes(); }
+
+ NormalRegion* reset(size_t blockSize);
+
+ RefPtr<WTF::MetaAllocatorHandle> m_allocation;
+};
+
+class ExcessRegion : public Region {
+ friend class Region;
+private:
+ ExcessRegion(PageAllocationAligned&, size_t blockSize, size_t totalBlocks);
+
+ ~ExcessRegion();
+
+ static ExcessRegion* create(size_t blockSize);
+ static ExcessRegion* createCustomSize(size_t blockSize, size_t blockAlignment);
+
+ void* base() { return m_allocation.base(); }
+ size_t size() { return m_allocation.size(); }
+
+ ExcessRegion* reset(size_t blockSize);
+
+ PageAllocationAligned m_allocation;
+};
+
+inline NormalRegion::NormalRegion(PassRefPtr<WTF::MetaAllocatorHandle> allocation, size_t blockSize, size_t totalBlocks)
+ : Region(blockSize, totalBlocks, false)
+ , m_allocation(allocation)
+{
+ initializeBlockList();
+}
+
+inline NormalRegion* NormalRegion::tryCreate(SuperRegion* superRegion, size_t blockSize)
+{
+ RefPtr<WTF::MetaAllocatorHandle> allocation = superRegion->allocate(s_regionSize, HEAP_MEMORY_ID);
+ if (!allocation)
+ return 0;
+ return new NormalRegion(allocation, blockSize, s_regionSize / blockSize);
+}
+
+inline NormalRegion* NormalRegion::tryCreateCustomSize(SuperRegion* superRegion, size_t blockSize, size_t blockAlignment)
+{
+ ASSERT_UNUSED(blockAlignment, blockAlignment <= s_regionSize);
+ RefPtr<WTF::MetaAllocatorHandle> allocation = superRegion->allocate(blockSize, HEAP_MEMORY_ID);
+ if (!allocation)
+ return 0;
+ return new NormalRegion(allocation, blockSize, 1);
+}
+
+inline NormalRegion* NormalRegion::reset(size_t blockSize)
+{
+ ASSERT(!m_isExcess);
+ RefPtr<WTF::MetaAllocatorHandle> allocation = m_allocation.release();
+ return new (NotNull, this) NormalRegion(allocation.release(), blockSize, s_regionSize / blockSize);
+}
+
+inline ExcessRegion::ExcessRegion(PageAllocationAligned& allocation, size_t blockSize, size_t totalBlocks)
+ : Region(blockSize, totalBlocks, true)
+ , m_allocation(allocation)
+{
+ initializeBlockList();
+}
+
+inline ExcessRegion::~ExcessRegion()
+{
+ m_allocation.deallocate();
+}
+
+inline ExcessRegion* ExcessRegion::create(size_t blockSize)
+{
+ PageAllocationAligned allocation = PageAllocationAligned::allocate(s_regionSize, s_regionSize, OSAllocator::JSGCHeapPages);
+ ASSERT(static_cast<bool>(allocation));
+ return new ExcessRegion(allocation, blockSize, s_regionSize / blockSize);
+}
+
+inline ExcessRegion* ExcessRegion::createCustomSize(size_t blockSize, size_t blockAlignment)
+{
+ PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, blockAlignment, OSAllocator::JSGCHeapPages);
+ ASSERT(static_cast<bool>(allocation));
+ return new ExcessRegion(allocation, blockSize, 1);
+}
+
+inline ExcessRegion* ExcessRegion::reset(size_t blockSize)
+{
+ ASSERT(m_isExcess);
+ PageAllocationAligned allocation = m_allocation;
+ return new (NotNull, this) ExcessRegion(allocation, blockSize, s_regionSize / blockSize);
+}
+
+inline Region::Region(size_t blockSize, size_t totalBlocks, bool isExcess)
+ : DoublyLinkedListNode<Region>()
+ , m_isExcess(isExcess)
+ , m_totalBlocks(totalBlocks)
+ , m_blocksInUse(0)
+ , m_blockSize(blockSize)
+ , m_isCustomSize(false)
+ , m_prev(0)
+ , m_next(0)
+{
+}
+
+inline void Region::initializeBlockList()
+{
+ char* start = static_cast<char*>(base());
+ char* current = start;
+ for (size_t i = 0; i < m_totalBlocks; i++) {
+ ASSERT(current < start + size());
+ m_deadBlocks.append(new (NotNull, current) DeadBlock(this));
+ current += m_blockSize;
+ }
+}
+
+inline Region* Region::create(SuperRegion* superRegion, size_t blockSize)
+{
+#if ENABLE(SUPER_REGION)
+ ASSERT(blockSize <= s_regionSize);
+ ASSERT(!(s_regionSize % blockSize));
+ Region* region = NormalRegion::tryCreate(superRegion, blockSize);
+ if (LIKELY(!!region))
+ return region;
+#else
+ UNUSED_PARAM(superRegion);
+#endif
+ return ExcessRegion::create(blockSize);
+}
+
+inline Region* Region::createCustomSize(SuperRegion* superRegion, size_t blockSize, size_t blockAlignment)
+{
+#if ENABLE(SUPER_REGION)
+ Region* region = NormalRegion::tryCreateCustomSize(superRegion, blockSize, blockAlignment);
+ if (UNLIKELY(!region))
+ region = ExcessRegion::createCustomSize(blockSize, blockAlignment);
+#else
+ UNUSED_PARAM(superRegion);
+ Region* region = ExcessRegion::createCustomSize(blockSize, blockAlignment);
+#endif
+ region->m_isCustomSize = true;
+ return region;
+}
+
+inline Region::~Region()
+{
+ ASSERT(isEmpty());
+}
+
+inline void Region::destroy()
+{
+#if ENABLE(SUPER_REGION)
+ if (UNLIKELY(m_isExcess))
+ delete static_cast<ExcessRegion*>(this);
+ else
+ delete static_cast<NormalRegion*>(this);
+#else
+ delete static_cast<ExcessRegion*>(this);
+#endif
+}
+
+inline Region* Region::reset(size_t blockSize)
+{
+#if ENABLE(SUPER_REGION)
+ ASSERT(isEmpty());
+ if (UNLIKELY(m_isExcess))
+ return static_cast<ExcessRegion*>(this)->reset(blockSize);
+ return static_cast<NormalRegion*>(this)->reset(blockSize);
+#else
+ return static_cast<ExcessRegion*>(this)->reset(blockSize);
+#endif
+}
+
+inline DeadBlock* Region::allocate()
+{
+ ASSERT(!isFull());
+ m_blocksInUse++;
+ return m_deadBlocks.removeHead();
+}
+
+inline void Region::deallocate(void* base)
+{
+ ASSERT(base);
+ ASSERT(m_blocksInUse);
+ ASSERT(base >= this->base() && base < static_cast<char*>(this->base()) + size());
+ DeadBlock* block = new (NotNull, base) DeadBlock(this);
+ m_deadBlocks.push(block);
+ m_blocksInUse--;
+}
+
+inline void* Region::base()
+{
+#if ENABLE(SUPER_REGION)
+ if (UNLIKELY(m_isExcess))
+ return static_cast<ExcessRegion*>(this)->ExcessRegion::base();
+ return static_cast<NormalRegion*>(this)->NormalRegion::base();
+#else
+ return static_cast<ExcessRegion*>(this)->ExcessRegion::base();
+#endif
+}
+
+inline size_t Region::size()
+{
+#if ENABLE(SUPER_REGION)
+ if (UNLIKELY(m_isExcess))
+ return static_cast<ExcessRegion*>(this)->ExcessRegion::size();
+ return static_cast<NormalRegion*>(this)->NormalRegion::size();
+#else
+ return static_cast<ExcessRegion*>(this)->ExcessRegion::size();
+#endif
+}
+
+} // namespace JSC
+
+#endif // JSC_Region_h
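
Region carves each 64KB allocation into equal-sized blocks and threads the unused ones onto a free list of DeadBlocks constructed in place; allocate() pops a block and deallocate() placement-news a fresh DeadBlock back onto the list. A minimal standalone sketch of that carve-and-recycle pattern (illustrative types):

    #include <cassert>
    #include <cstdlib>
    #include <new>

    struct DeadBlock { DeadBlock* next; };    // free-list node built in place in each free block

    class Region {
    public:
        Region(size_t regionSize, size_t blockSize)
            : m_memory(std::malloc(regionSize))
        {
            char* base = static_cast<char*>(m_memory);
            for (size_t offset = 0; offset + blockSize <= regionSize; offset += blockSize)
                push(new (base + offset) DeadBlock);    // placement-new onto the free list
        }
        ~Region() { std::free(m_memory); }

        void* allocate()                                // hand out one block
        {
            assert(m_freeList);
            DeadBlock* block = m_freeList;
            m_freeList = block->next;
            return block;
        }
        void deallocate(void* base) { push(new (base) DeadBlock); }  // recycle a block

    private:
        void push(DeadBlock* block) { block->next = m_freeList; m_freeList = block; }

        void* m_memory;
        DeadBlock* m_freeList = nullptr;
    };

    int main()
    {
        Region region(64 * 1024, 4 * 1024);
        void* a = region.allocate();
        void* b = region.allocate();
        region.deallocate(a);
        assert(region.allocate() == a);   // freed blocks are reused first (LIFO)
        (void)b;
    }
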
diff --git a/Source/JavaScriptCore/heap/SlotVisitor.cpp b/Source/JavaScriptCore/heap/SlotVisitor.cpp
index 3ff4b48fa..6c2ded09d 100644
--- a/Source/JavaScriptCore/heap/SlotVisitor.cpp
+++ b/Source/JavaScriptCore/heap/SlotVisitor.cpp
@@ -8,19 +8,20 @@
#include "GCThread.h"
#include "JSArray.h"
#include "JSDestructibleObject.h"
-#include "JSGlobalData.h"
+#include "VM.h"
#include "JSObject.h"
#include "JSString.h"
+#include "Operations.h"
#include <wtf/StackStats.h>
namespace JSC {
SlotVisitor::SlotVisitor(GCThreadSharedData& shared)
- : m_stack(shared.m_globalData->heap.blockAllocator())
+ : m_stack(shared.m_vm->heap.blockAllocator())
, m_visitCount(0)
, m_isInParallelMode(false)
, m_shared(shared)
- , m_shouldHashConst(false)
+ , m_shouldHashCons(false)
#if !ASSERT_DISABLED
, m_isCheckingForDefaultMarkViolation(false)
, m_isDraining(false)
@@ -35,11 +36,11 @@ SlotVisitor::~SlotVisitor()
void SlotVisitor::setup()
{
- m_shared.m_shouldHashConst = m_shared.m_globalData->haveEnoughNewStringsToHashConst();
- m_shouldHashConst = m_shared.m_shouldHashConst;
+ m_shared.m_shouldHashCons = m_shared.m_vm->haveEnoughNewStringsToHashCons();
+ m_shouldHashCons = m_shared.m_shouldHashCons;
#if ENABLE(PARALLEL_GC)
for (unsigned i = 0; i < m_shared.m_gcThreads.size(); ++i)
- m_shared.m_gcThreads[i]->slotVisitor()->m_shouldHashConst = m_shared.m_shouldHashConst;
+ m_shared.m_gcThreads[i]->slotVisitor()->m_shouldHashCons = m_shared.m_shouldHashCons;
#endif
}
@@ -52,9 +53,9 @@ void SlotVisitor::reset()
#else
m_opaqueRoots.clear();
#endif
- if (m_shouldHashConst) {
+ if (m_shouldHashCons) {
m_uniqueStrings.clear();
- m_shouldHashConst = false;
+ m_shouldHashCons = false;
}
}
@@ -240,15 +241,15 @@ void SlotVisitor::mergeOpaqueRoots()
m_opaqueRoots.clear();
}
-ALWAYS_INLINE bool JSString::tryHashConstLock()
+ALWAYS_INLINE bool JSString::tryHashConsLock()
{
#if ENABLE(PARALLEL_GC)
unsigned currentFlags = m_flags;
- if (currentFlags & HashConstLock)
+ if (currentFlags & HashConsLock)
return false;
- unsigned newFlags = currentFlags | HashConstLock;
+ unsigned newFlags = currentFlags | HashConsLock;
if (!WTF::weakCompareAndSwap(&m_flags, currentFlags, newFlags))
return false;
@@ -256,26 +257,26 @@ ALWAYS_INLINE bool JSString::tryHashConstLock()
WTF::memoryBarrierAfterLock();
return true;
#else
- if (isHashConstSingleton())
+ if (isHashConsSingleton())
return false;
- m_flags |= HashConstLock;
+ m_flags |= HashConsLock;
return true;
#endif
}
-ALWAYS_INLINE void JSString::releaseHashConstLock()
+ALWAYS_INLINE void JSString::releaseHashConsLock()
{
#if ENABLE(PARALLEL_GC)
WTF::memoryBarrierBeforeUnlock();
#endif
- m_flags &= ~HashConstLock;
+ m_flags &= ~HashConsLock;
}
-ALWAYS_INLINE bool JSString::shouldTryHashConst()
+ALWAYS_INLINE bool JSString::shouldTryHashCons()
{
- return ((length() > 1) && !isRope() && !isHashConstSingleton());
+ return ((length() > 1) && !isRope() && !isHashConsSingleton());
}
ALWAYS_INLINE void SlotVisitor::internalAppend(JSValue* slot)
@@ -297,21 +298,21 @@ ALWAYS_INLINE void SlotVisitor::internalAppend(JSValue* slot)
validate(cell);
- if (m_shouldHashConst && cell->isString()) {
+ if (m_shouldHashCons && cell->isString()) {
JSString* string = jsCast<JSString*>(cell);
- if (string->shouldTryHashConst() && string->tryHashConstLock()) {
+ if (string->shouldTryHashCons() && string->tryHashConsLock()) {
UniqueStringMap::AddResult addResult = m_uniqueStrings.add(string->string().impl(), value);
if (addResult.isNewEntry)
- string->setHashConstSingleton();
+ string->setHashConsSingleton();
else {
JSValue existingJSValue = addResult.iterator->value;
if (value != existingJSValue)
- jsCast<JSString*>(existingJSValue.asCell())->clearHashConstSingleton();
+ jsCast<JSString*>(existingJSValue.asCell())->clearHashConsSingleton();
*slot = existingJSValue;
- string->releaseHashConstLock();
+ string->releaseHashConsLock();
return;
}
- string->releaseHashConstLock();
+ string->releaseHashConsLock();
}
}
@@ -335,10 +336,7 @@ void SlotVisitor::finalizeUnconditionalFinalizers()
#if ENABLE(GC_VALIDATION)
void SlotVisitor::validate(JSCell* cell)
{
- if (!cell) {
- dataLogF("cell is NULL\n");
- CRASH();
- }
+ RELEASE_ASSERT(cell);
if (!cell->structure()) {
dataLogF("cell at %p has a null structure\n" , cell);
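
The renamed hash-consing path still does the same job: while marking, strings with equal contents are collapsed onto one canonical JSString by rewriting the visited slot. A standalone sketch of that deduplication, with std::string standing in for JSString:

    #include <cassert>
    #include <map>
    #include <string>

    struct Cell { std::string value; };                     // stand-in for JSString

    using UniqueStringMap = std::map<std::string, Cell*>;   // contents -> canonical cell

    // Visit a slot: if an equal string was seen before, repoint the slot at it.
    void visitStringSlot(Cell*& slot, UniqueStringMap& uniqueStrings)
    {
        auto result = uniqueStrings.insert({ slot->value, slot });
        if (!result.second)                 // not a new entry: an equal string already exists
            slot = result.first->second;    // share the canonical cell instead
    }

    int main()
    {
        Cell a{ "hello" }, b{ "hello" }, c{ "world" };
        Cell* slotA = &a;
        Cell* slotB = &b;
        Cell* slotC = &c;
        UniqueStringMap seen;
        visitStringSlot(slotA, seen);
        visitStringSlot(slotB, seen);
        visitStringSlot(slotC, seen);
        assert(slotA == slotB);             // the duplicates now share one cell
        assert(slotC == &c);
    }
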
diff --git a/Source/JavaScriptCore/heap/SlotVisitor.h b/Source/JavaScriptCore/heap/SlotVisitor.h
index 7d16dc2ed..e1808faf0 100644
--- a/Source/JavaScriptCore/heap/SlotVisitor.h
+++ b/Source/JavaScriptCore/heap/SlotVisitor.h
@@ -62,6 +62,7 @@ public:
void addOpaqueRoot(void*);
bool containsOpaqueRoot(void*);
+ TriState containsOpaqueRootTriState(void*);
int opaqueRootCount();
GCThreadSharedData& sharedData() { return m_shared; }
@@ -124,7 +125,7 @@ private:
GCThreadSharedData& m_shared;
- bool m_shouldHashConst; // Local per-thread copy of shared flag for performance reasons
+ bool m_shouldHashCons; // Local per-thread copy of shared flag for performance reasons
typedef HashMap<StringImpl*, JSValue> UniqueStringMap;
UniqueStringMap m_uniqueStrings;
diff --git a/Source/JavaScriptCore/heap/SlotVisitorInlines.h b/Source/JavaScriptCore/heap/SlotVisitorInlines.h
index 3a7f2290c..4273a28d6 100644
--- a/Source/JavaScriptCore/heap/SlotVisitorInlines.h
+++ b/Source/JavaScriptCore/heap/SlotVisitorInlines.h
@@ -31,6 +31,7 @@
#include "Options.h"
#include "SlotVisitor.h"
#include "Weak.h"
+#include "WeakInlines.h"
namespace JSC {
@@ -121,6 +122,16 @@ inline bool SlotVisitor::containsOpaqueRoot(void* root)
#endif
}
+inline TriState SlotVisitor::containsOpaqueRootTriState(void* root)
+{
+ if (m_opaqueRoots.contains(root))
+ return TrueTriState;
+ MutexLocker locker(m_shared.m_opaqueRootsLock);
+ if (m_shared.m_opaqueRoots.contains(root))
+ return TrueTriState;
+ return MixedTriState;
+}
+
inline int SlotVisitor::opaqueRootCount()
{
ASSERT(!m_isInParallelMode);
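
containsOpaqueRootTriState() answers "definitely present" or "can't tell": during parallel marking another thread's local set may hold the root, so absence is never definitive. A simplified sketch of the same three-way answer (the real method takes only the root and reads the visitor's own sets):

    #include <cassert>
    #include <set>

    enum TriState { FalseTriState, TrueTriState, MixedTriState };

    // True only when the root is provably present; otherwise "mixed", because a
    // concurrently marking thread may still hold it in a local set we cannot see.
    TriState containsOpaqueRootTriState(const std::set<void*>& localRoots,
                                        const std::set<void*>& sharedRoots, void* root)
    {
        if (localRoots.count(root) || sharedRoots.count(root))
            return TrueTriState;
        return MixedTriState;
    }

    int main()
    {
        int rootObject = 0;
        std::set<void*> local, shared;
        assert(containsOpaqueRootTriState(local, shared, &rootObject) == MixedTriState);
        shared.insert(&rootObject);
        assert(containsOpaqueRootTriState(local, shared, &rootObject) == TrueTriState);
    }
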
diff --git a/Source/JavaScriptCore/heap/Strong.h b/Source/JavaScriptCore/heap/Strong.h
index 7fafaeab5..e00e92061 100644
--- a/Source/JavaScriptCore/heap/Strong.h
+++ b/Source/JavaScriptCore/heap/Strong.h
@@ -32,12 +32,13 @@
namespace JSC {
-class JSGlobalData;
+class VM;
// A strongly referenced handle that prevents the object it points to from being garbage collected.
template <typename T> class Strong : public Handle<T> {
using Handle<T>::slot;
using Handle<T>::setSlot;
+ template <typename U> friend class Strong;
public:
typedef typename Handle<T>::ExternalType ExternalType;
@@ -47,9 +48,9 @@ public:
{
}
- Strong(JSGlobalData&, ExternalType = ExternalType());
+ Strong(VM&, ExternalType = ExternalType());
- Strong(JSGlobalData&, Handle<T>);
+ Strong(VM&, Handle<T>);
Strong(const Strong& other)
: Handle<T>()
@@ -94,7 +95,7 @@ public:
ExternalType get() const { return HandleTypes<T>::getFromSlot(this->slot()); }
- void set(JSGlobalData&, ExternalType);
+ void set(VM&, ExternalType);
template <typename U> Strong& operator=(const Strong<U>& other)
{
@@ -103,7 +104,7 @@ public:
return *this;
}
- set(*HandleSet::heapFor(other.slot())->globalData(), other.get());
+ set(*HandleSet::heapFor(other.slot())->vm(), other.get());
return *this;
}
@@ -114,7 +115,7 @@ public:
return *this;
}
- set(*HandleSet::heapFor(other.slot())->globalData(), other.get());
+ set(*HandleSet::heapFor(other.slot())->vm(), other.get());
return *this;
}
diff --git a/Source/JavaScriptCore/heap/StrongInlines.h b/Source/JavaScriptCore/heap/StrongInlines.h
index 2308bf6f6..e1fbe90d2 100644
--- a/Source/JavaScriptCore/heap/StrongInlines.h
+++ b/Source/JavaScriptCore/heap/StrongInlines.h
@@ -26,29 +26,29 @@
#ifndef StrongInlines_h
#define StrongInlines_h
-#include "JSGlobalData.h"
+#include "VM.h"
namespace JSC {
template <typename T>
-inline Strong<T>::Strong(JSGlobalData& globalData, ExternalType value)
- : Handle<T>(globalData.heap.handleSet()->allocate())
+inline Strong<T>::Strong(VM& vm, ExternalType value)
+ : Handle<T>(vm.heap.handleSet()->allocate())
{
set(value);
}
template <typename T>
-inline Strong<T>::Strong(JSGlobalData& globalData, Handle<T> handle)
- : Handle<T>(globalData.heap.handleSet()->allocate())
+inline Strong<T>::Strong(VM& vm, Handle<T> handle)
+ : Handle<T>(vm.heap.handleSet()->allocate())
{
set(handle.get());
}
template <typename T>
-inline void Strong<T>::set(JSGlobalData& globalData, ExternalType value)
+inline void Strong<T>::set(VM& vm, ExternalType value)
{
if (!slot())
- setSlot(globalData.heap.handleSet()->allocate());
+ setSlot(vm.heap.handleSet()->allocate());
set(value);
}
diff --git a/Source/JavaScriptCore/heap/SuperRegion.cpp b/Source/JavaScriptCore/heap/SuperRegion.cpp
new file mode 100644
index 000000000..d58f600b5
--- /dev/null
+++ b/Source/JavaScriptCore/heap/SuperRegion.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "SuperRegion.h"
+
+#include "Region.h"
+
+namespace JSC {
+
+const uint64_t SuperRegion::s_fixedHeapMemoryPoolSize = 4 * 1024 * static_cast<uint64_t>(MB);
+
+SuperRegion::SuperRegion()
+ : MetaAllocator(Region::s_regionSize, Region::s_regionSize)
+ , m_reservationBase(0)
+{
+#if ENABLE(SUPER_REGION)
+ // Over-allocate so that we can make sure that we're aligned to the size of Regions.
+ m_reservation = PageReservation::reserve(s_fixedHeapMemoryPoolSize + Region::s_regionSize, OSAllocator::JSGCHeapPages);
+ m_reservationBase = getAlignedBase(m_reservation);
+ addFreshFreeSpace(m_reservationBase, s_fixedHeapMemoryPoolSize);
+#else
+ UNUSED_PARAM(m_reservation);
+ UNUSED_PARAM(m_reservationBase);
+#endif
+}
+
+SuperRegion::~SuperRegion()
+{
+#if ENABLE(SUPER_REGION)
+ m_reservation.deallocate();
+#endif
+}
+
+void* SuperRegion::getAlignedBase(PageReservation& reservation)
+{
+ for (char* current = static_cast<char*>(reservation.base()); current < static_cast<char*>(reservation.base()) + Region::s_regionSize; current += pageSize()) {
+ if (!(reinterpret_cast<size_t>(current) & ~Region::s_regionMask))
+ return current;
+ }
+ ASSERT_NOT_REACHED();
+ return 0;
+}
+
+void* SuperRegion::allocateNewSpace(size_t&)
+{
+ return 0;
+}
+
+void SuperRegion::notifyNeedPage(void* page)
+{
+ m_reservation.commit(page, Region::s_regionSize);
+}
+
+void SuperRegion::notifyPageIsFree(void* page)
+{
+ m_reservation.decommit(page, Region::s_regionSize);
+}
+
+} // namespace JSC
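
SuperRegion::getAlignedBase() over-reserves by one region and walks forward page by page until it reaches a region-aligned address. The same alignment can also be computed arithmetically; a standalone sketch with illustrative sizes:

    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    static const size_t regionSize = 64 * 1024;

    // Round a reservation base up to the next region boundary.
    inline void* alignedBase(void* reservationBase)
    {
        uintptr_t base = reinterpret_cast<uintptr_t>(reservationBase);
        uintptr_t aligned = (base + regionSize - 1) & ~static_cast<uintptr_t>(regionSize - 1);
        return reinterpret_cast<void*>(aligned);
    }

    int main()
    {
        // Over-allocate by one region so an aligned start always fits inside.
        void* reservation = std::malloc(4 * regionSize + regionSize);
        std::printf("reservation %p -> aligned base %p\n", reservation, alignedBase(reservation));
        std::free(reservation);
    }
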
diff --git a/Source/JavaScriptCore/heap/SuperRegion.h b/Source/JavaScriptCore/heap/SuperRegion.h
new file mode 100644
index 000000000..b659510f8
--- /dev/null
+++ b/Source/JavaScriptCore/heap/SuperRegion.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef SuperRegion_h
+#define SuperRegion_h
+
+#include <wtf/MetaAllocator.h>
+#include <wtf/PageBlock.h>
+#include <wtf/PageReservation.h>
+
+namespace JSC {
+
+class VM;
+
+class SuperRegion : public WTF::MetaAllocator {
+public:
+ SuperRegion();
+ virtual ~SuperRegion();
+
+protected:
+ virtual void* allocateNewSpace(size_t&);
+ virtual void notifyNeedPage(void*);
+ virtual void notifyPageIsFree(void*);
+
+private:
+ static const uint64_t s_fixedHeapMemoryPoolSize;
+
+ static void* getAlignedBase(PageReservation&);
+
+ PageReservation m_reservation;
+ void* m_reservationBase;
+};
+
+} // namespace JSC
+
+#endif // SuperRegion_h
diff --git a/Source/JavaScriptCore/heap/VTableSpectrum.cpp b/Source/JavaScriptCore/heap/VTableSpectrum.cpp
index acb494034..a6f8e54bd 100644
--- a/Source/JavaScriptCore/heap/VTableSpectrum.cpp
+++ b/Source/JavaScriptCore/heap/VTableSpectrum.cpp
@@ -30,7 +30,6 @@
#include "Structure.h"
#include <algorithm>
#include <stdio.h>
-#include <wtf/Platform.h>
#include <wtf/Vector.h>
#if PLATFORM(MAC)
diff --git a/Source/JavaScriptCore/heap/Weak.cpp b/Source/JavaScriptCore/heap/Weak.cpp
new file mode 100644
index 000000000..3857b60d2
--- /dev/null
+++ b/Source/JavaScriptCore/heap/Weak.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "Weak.h"
+
+#include "WeakSetInlines.h"
+
+namespace JSC {
+
+void weakClearSlowCase(WeakImpl*& impl)
+{
+ ASSERT(impl);
+
+ WeakSet::deallocate(impl);
+ impl = 0;
+}
+
+} // namespace JSC
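
weakClearSlowCase() keeps the rarely needed deallocation out of the header so Weak<T>::clear() inlines to a null check plus one call. A standalone sketch of that fast-path/slow-path split (illustrative types):

    #include <cstdio>

    struct Impl { int refCount; };

    // Out-of-line slow path, as Weak.cpp does with weakClearSlowCase().
    void clearSlowCase(Impl*& impl)
    {
        std::puts("deallocating impl");
        delete impl;
        impl = nullptr;
    }

    struct WeakLike {
        Impl* m_impl = nullptr;
        void clear()                  // stays tiny so it inlines from the header
        {
            if (!m_impl)
                return;               // common case: already clear
            clearSlowCase(m_impl);    // rare case: hand off to the out-of-line helper
        }
    };

    int main()
    {
        WeakLike w;
        w.clear();                    // fast path, no call
        w.m_impl = new Impl{ 1 };
        w.clear();                    // slow path frees it
    }
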
diff --git a/Source/JavaScriptCore/heap/Weak.h b/Source/JavaScriptCore/heap/Weak.h
index efb2a9a56..5c901df22 100644
--- a/Source/JavaScriptCore/heap/Weak.h
+++ b/Source/JavaScriptCore/heap/Weak.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009, 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2009, 2012, 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,22 +26,32 @@
#ifndef Weak_h
#define Weak_h
-#include "PassWeak.h"
-#include "WeakSetInlines.h"
-#include <wtf/Assertions.h>
-#include <wtf/HashMap.h>
+#include <wtf/Noncopyable.h>
+#include <wtf/NullPtr.h>
namespace JSC {
-template<typename T> class Weak : public WeakImplAccessor<Weak<T>, T> {
+template<typename T> class PassWeak;
+class WeakImpl;
+class WeakHandleOwner;
+
+// This is a free function rather than a Weak<T> member function so we can put it in Weak.cpp.
+JS_EXPORT_PRIVATE void weakClearSlowCase(WeakImpl*&);
+
+template<typename T> class Weak {
WTF_MAKE_NONCOPYABLE(Weak);
public:
- friend class WeakImplAccessor<Weak<T>, T>;
- typedef typename WeakImplAccessor<Weak<T>, T>::GetType GetType;
+ Weak()
+ : m_impl(0)
+ {
+ }
- Weak();
- Weak(std::nullptr_t);
- Weak(GetType, WeakHandleOwner* = 0, void* context = 0);
+ explicit Weak(std::nullptr_t)
+ : m_impl(0)
+ {
+ }
+
+ explicit Weak(T*, WeakHandleOwner* = 0, void* context = 0);
enum HashTableDeletedValueTag { HashTableDeletedValue };
bool isHashTableDeletedValue() const;
@@ -49,19 +59,32 @@ public:
template<typename U> Weak(const PassWeak<U>&);
- ~Weak();
+ ~Weak()
+ {
+ clear();
+ }
void swap(Weak&);
Weak& operator=(const PassWeak<T>&);
bool operator!() const;
+ T* operator->() const;
+ T& operator*() const;
+ T* get() const;
+
+ bool was(T*) const;
// This conversion operator allows implicit conversion to bool but not to other integer types.
- typedef JSValue (HandleBase::*UnspecifiedBoolType);
+ typedef void* (Weak::*UnspecifiedBoolType);
operator UnspecifiedBoolType*() const;
PassWeak<T> release();
- void clear();
+ void clear()
+ {
+ if (!m_impl)
+ return;
+ weakClearSlowCase(m_impl);
+ }
private:
static WeakImpl* hashTableDeletedValue();
@@ -69,145 +92,6 @@ private:
WeakImpl* m_impl;
};
-template<typename T> inline Weak<T>::Weak()
- : m_impl(0)
-{
-}
-
-template<typename T> inline Weak<T>::Weak(std::nullptr_t)
- : m_impl(0)
-{
-}
-
-template<typename T> inline Weak<T>::Weak(typename Weak<T>::GetType getType, WeakHandleOwner* weakOwner, void* context)
- : m_impl(getType ? WeakSet::allocate(getType, weakOwner, context) : 0)
-{
-}
-
-template<typename T> inline bool Weak<T>::isHashTableDeletedValue() const
-{
- return m_impl == hashTableDeletedValue();
-}
-
-template<typename T> inline Weak<T>::Weak(typename Weak<T>::HashTableDeletedValueTag)
- : m_impl(hashTableDeletedValue())
-{
-}
-
-template<typename T> template<typename U> inline Weak<T>::Weak(const PassWeak<U>& other)
- : m_impl(other.leakImpl())
-{
-}
-
-template<typename T> inline Weak<T>::~Weak()
-{
- clear();
-}
-
-template<class T> inline void swap(Weak<T>& a, Weak<T>& b)
-{
- a.swap(b);
-}
-
-template<typename T> inline void Weak<T>::swap(Weak& other)
-{
- std::swap(m_impl, other.m_impl);
-}
-
-template<typename T> inline Weak<T>& Weak<T>::operator=(const PassWeak<T>& o)
-{
- clear();
- m_impl = o.leakImpl();
- return *this;
-}
-
-template<typename T> inline bool Weak<T>::operator!() const
-{
- return !m_impl || !m_impl->jsValue() || m_impl->state() != WeakImpl::Live;
-}
-
-template<typename T> inline Weak<T>::operator UnspecifiedBoolType*() const
-{
- return reinterpret_cast<UnspecifiedBoolType*>(!!*this);
-}
-
-template<typename T> inline PassWeak<T> Weak<T>::release()
-{
- PassWeak<T> tmp = adoptWeak<T>(m_impl);
- m_impl = 0;
- return tmp;
-}
-
-template<typename T> inline void Weak<T>::clear()
-{
- if (!m_impl)
- return;
- WeakSet::deallocate(m_impl);
- m_impl = 0;
-}
-
-template<typename T> inline WeakImpl* Weak<T>::hashTableDeletedValue()
-{
- return reinterpret_cast<WeakImpl*>(-1);
-}
-
-template <typename T> inline bool operator==(const Weak<T>& lhs, const Weak<T>& rhs)
-{
- return lhs.get() == rhs.get();
-}
-
-// This function helps avoid modifying a weak table while holding an iterator into it. (Object allocation
-// can run a finalizer that modifies the table. We avoid that by requiring a pre-constructed object as our value.)
-template<typename Map, typename Key, typename Value> inline void weakAdd(Map& map, const Key& key, Value value)
-{
- ASSERT(!map.get(key));
- map.set(key, value); // The table may still have a zombie for value.
-}
-
-template<typename Map, typename Key, typename Value> inline void weakRemove(Map& map, const Key& key, Value value)
-{
- typename Map::iterator it = map.find(key);
- ASSERT_UNUSED(value, value);
- ASSERT(it != map.end());
- ASSERT(it->value.was(value));
- ASSERT(!it->value);
- map.remove(it);
-}
-
-template<typename T> inline void weakClear(Weak<T>& weak, typename Weak<T>::GetType value)
-{
- ASSERT_UNUSED(value, value);
- ASSERT(weak.was(value));
- ASSERT(!weak);
- weak.clear();
-}
-
} // namespace JSC
-namespace WTF {
-
-template<typename T> struct VectorTraits<JSC::Weak<T> > : SimpleClassVectorTraits {
- static const bool canCompareWithMemcmp = false;
-};
-
-template<typename T> struct HashTraits<JSC::Weak<T> > : SimpleClassHashTraits<JSC::Weak<T> > {
- typedef JSC::Weak<T> StorageType;
-
- typedef std::nullptr_t EmptyValueType;
- static EmptyValueType emptyValue() { return nullptr; }
-
- typedef JSC::PassWeak<T> PassInType;
- static void store(PassInType value, StorageType& storage) { storage = value; }
-
- typedef JSC::PassWeak<T> PassOutType;
- static PassOutType passOut(StorageType& value) { return value.release(); }
- static PassOutType passOut(EmptyValueType) { return PassOutType(); }
-
- typedef typename StorageType::GetType PeekType;
- static PeekType peek(const StorageType& value) { return value.get(); }
- static PeekType peek(EmptyValueType) { return PeekType(); }
-};
-
-}
-
#endif // Weak_h
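
After this change Weak<T> declares its API in the header and defines the non-trivial pieces in WeakInlines.h (added below), so translation units that merely hold a Weak<T> compile against a much smaller surface. A hypothetical usage sketch, assuming object is a live JSObject* and that the using .cpp includes WeakInlines.h:

Weak<JSObject> weak(object);                // the T* constructor is now explicit
JSObject* raw = weak.get();                 // returns 0 once the cell has been collected
if (weak) {                                 // UnspecifiedBoolType: true only while the cell is live
    // operator-> and operator* may be used here; they assert liveness
}
PassWeak<JSObject> passed = weak.release(); // transfer ownership; 'weak' is left null
weak.clear();                               // safe no-op on an already-null handle
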
diff --git a/Source/JavaScriptCore/heap/WeakBlock.cpp b/Source/JavaScriptCore/heap/WeakBlock.cpp
index 5f01f34b3..957090569 100644
--- a/Source/JavaScriptCore/heap/WeakBlock.cpp
+++ b/Source/JavaScriptCore/heap/WeakBlock.cpp
@@ -29,7 +29,7 @@
#include "Heap.h"
#include "HeapRootVisitor.h"
#include "JSObject.h"
-
+#include "Operations.h"
#include "Structure.h"
namespace JSC {
diff --git a/Source/JavaScriptCore/heap/WeakBlock.h b/Source/JavaScriptCore/heap/WeakBlock.h
index fd28101fd..b6b631e27 100644
--- a/Source/JavaScriptCore/heap/WeakBlock.h
+++ b/Source/JavaScriptCore/heap/WeakBlock.h
@@ -97,7 +97,7 @@ inline bool WeakBlock::SweepResult::isNull() const
inline WeakImpl* WeakBlock::asWeakImpl(FreeCell* freeCell)
{
- return reinterpret_cast<WeakImpl*>(freeCell);
+ return reinterpret_cast_ptr<WeakImpl*>(freeCell);
}
inline WeakBlock::SweepResult WeakBlock::takeSweepResult()
@@ -110,12 +110,12 @@ inline WeakBlock::SweepResult WeakBlock::takeSweepResult()
inline WeakBlock::FreeCell* WeakBlock::asFreeCell(WeakImpl* weakImpl)
{
- return reinterpret_cast<FreeCell*>(weakImpl);
+ return reinterpret_cast_ptr<FreeCell*>(weakImpl);
}
inline WeakImpl* WeakBlock::weakImpls()
{
- return reinterpret_cast<WeakImpl*>(this) + ((sizeof(WeakBlock) + sizeof(WeakImpl) - 1) / sizeof(WeakImpl));
+ return reinterpret_cast_ptr<WeakImpl*>(this) + ((sizeof(WeakBlock) + sizeof(WeakImpl) - 1) / sizeof(WeakImpl));
}
inline size_t WeakBlock::weakImplCount()
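
The reinterpret_cast_ptr substitutions swap in WTF's cast wrapper without changing behaviour; the line worth pausing on is weakImpls(), whose (sizeof(WeakBlock) + sizeof(WeakImpl) - 1) / sizeof(WeakImpl) rounds the block header up to a whole number of WeakImpl slots so the payload array starts on a WeakImpl boundary. A worked example with stand-in sizes (not the real sizeof values):

#include <cstddef>
#include <cstdio>

int main()
{
    const size_t headerSize = 40;  // stand-in for sizeof(WeakBlock)
    const size_t implSize = 16;    // stand-in for sizeof(WeakImpl)
    const size_t firstSlot = (headerSize + implSize - 1) / implSize; // == 3
    std::printf("first WeakImpl slot: %zu (byte offset %zu)\n",
                firstSlot, firstSlot * implSize); // byte 48: header padded up
    return 0;
}
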
diff --git a/Source/JavaScriptCore/heap/WeakImpl.h b/Source/JavaScriptCore/heap/WeakImpl.h
index 9924923f9..ca93fb286 100644
--- a/Source/JavaScriptCore/heap/WeakImpl.h
+++ b/Source/JavaScriptCore/heap/WeakImpl.h
@@ -26,7 +26,7 @@
#ifndef WeakImpl_h
#define WeakImpl_h
-#include "JSValue.h"
+#include "JSCJSValue.h"
namespace JSC {
@@ -107,7 +107,7 @@ inline void* WeakImpl::context()
inline WeakImpl* WeakImpl::asWeakImpl(JSValue* slot)
{
- return reinterpret_cast<WeakImpl*>(reinterpret_cast<char*>(slot) + OBJECT_OFFSETOF(WeakImpl, m_jsValue));
+ return reinterpret_cast_ptr<WeakImpl*>(reinterpret_cast_ptr<char*>(slot) + OBJECT_OFFSETOF(WeakImpl, m_jsValue));
}
} // namespace JSC
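
asWeakImpl maps a pointer to a WeakImpl's m_jsValue slot back to the WeakImpl that contains it, using OBJECT_OFFSETOF to relate the member's address to the enclosing object's. A self-contained sketch of that relationship with plain offsetof and hypothetical types (Impl, slot):

#include <cstddef>

struct Impl {
    void* owner;
    long slot; // stand-in for the JSValue payload
};

int main()
{
    Impl impl = { 0, 42 };
    // A member's address is the enclosing object's address plus its offset...
    char* memberAddr = reinterpret_cast<char*>(&impl) + offsetof(Impl, slot);
    // ...so a pointer to the member maps back by undoing that same offset.
    Impl* recovered = reinterpret_cast<Impl*>(memberAddr - offsetof(Impl, slot));
    return recovered == &impl ? 0 : 1;
}
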
diff --git a/Source/JavaScriptCore/heap/WeakInlines.h b/Source/JavaScriptCore/heap/WeakInlines.h
new file mode 100644
index 000000000..221b6c11a
--- /dev/null
+++ b/Source/JavaScriptCore/heap/WeakInlines.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2009, 2012, 2013 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WeakInlines_h
+#define WeakInlines_h
+
+#include "PassWeak.h"
+#include "WeakSetInlines.h"
+#include <wtf/Assertions.h>
+#include <wtf/HashTraits.h>
+
+namespace JSC {
+
+template<typename T> inline Weak<T>::Weak(T* cell, WeakHandleOwner* weakOwner, void* context)
+ : m_impl(cell ? WeakSet::allocate(cell, weakOwner, context) : 0)
+{
+}
+
+template<typename T> inline bool Weak<T>::isHashTableDeletedValue() const
+{
+ return m_impl == hashTableDeletedValue();
+}
+
+template<typename T> inline Weak<T>::Weak(typename Weak<T>::HashTableDeletedValueTag)
+ : m_impl(hashTableDeletedValue())
+{
+}
+
+template<typename T> template<typename U> inline Weak<T>::Weak(const PassWeak<U>& other)
+ : m_impl(other.leakImpl())
+{
+}
+
+template<class T> inline void swap(Weak<T>& a, Weak<T>& b)
+{
+ a.swap(b);
+}
+
+template<typename T> inline void Weak<T>::swap(Weak& other)
+{
+ std::swap(m_impl, other.m_impl);
+}
+
+template<typename T> inline Weak<T>& Weak<T>::operator=(const PassWeak<T>& o)
+{
+ clear();
+ m_impl = o.leakImpl();
+ return *this;
+}
+
+template<typename T> inline T* Weak<T>::operator->() const
+{
+ ASSERT(m_impl && m_impl->state() == WeakImpl::Live);
+ return jsCast<T*>(m_impl->jsValue().asCell());
+}
+
+template<typename T> inline T& Weak<T>::operator*() const
+{
+ ASSERT(m_impl && m_impl->state() == WeakImpl::Live);
+ return *jsCast<T*>(m_impl->jsValue().asCell());
+}
+
+template<typename T> inline T* Weak<T>::get() const
+{
+ if (!m_impl || m_impl->state() != WeakImpl::Live)
+ return 0;
+ return jsCast<T*>(m_impl->jsValue().asCell());
+}
+
+template<typename T> inline bool Weak<T>::was(T* other) const
+{
+ return jsCast<T*>(m_impl->jsValue().asCell()) == other;
+}
+
+template<typename T> inline bool Weak<T>::operator!() const
+{
+ return !m_impl || !m_impl->jsValue() || m_impl->state() != WeakImpl::Live;
+}
+
+template<typename T> inline Weak<T>::operator UnspecifiedBoolType*() const
+{
+ return reinterpret_cast<UnspecifiedBoolType*>(!!*this);
+}
+
+template<typename T> inline PassWeak<T> Weak<T>::release()
+{
+ PassWeak<T> tmp = adoptWeak<T>(m_impl);
+ m_impl = 0;
+ return tmp;
+}
+
+template<typename T> inline WeakImpl* Weak<T>::hashTableDeletedValue()
+{
+ return reinterpret_cast<WeakImpl*>(-1);
+}
+
+template <typename T> inline bool operator==(const Weak<T>& lhs, const Weak<T>& rhs)
+{
+ return lhs.get() == rhs.get();
+}
+
+// This function helps avoid modifying a weak table while holding an iterator into it. (Object allocation
+// can run a finalizer that modifies the table. We avoid that by requiring a pre-constructed object as our value.)
+template<typename Map, typename Key, typename Value> inline void weakAdd(Map& map, const Key& key, Value value)
+{
+ ASSERT(!map.get(key));
+ map.set(key, value); // The table may still have a zombie for value.
+}
+
+template<typename Map, typename Key, typename Value> inline void weakRemove(Map& map, const Key& key, Value value)
+{
+ typename Map::iterator it = map.find(key);
+ ASSERT_UNUSED(value, value);
+ ASSERT(it != map.end());
+ ASSERT(it->value.was(value));
+ ASSERT(!it->value);
+ map.remove(it);
+}
+
+template<typename T> inline void weakClear(Weak<T>& weak, T* cell)
+{
+ ASSERT_UNUSED(cell, cell);
+ ASSERT(weak.was(cell));
+ ASSERT(!weak);
+ weak.clear();
+}
+
+} // namespace JSC
+
+namespace WTF {
+
+template<typename T> struct VectorTraits<JSC::Weak<T> > : SimpleClassVectorTraits {
+ static const bool canCompareWithMemcmp = false;
+};
+
+template<typename T> struct HashTraits<JSC::Weak<T> > : SimpleClassHashTraits<JSC::Weak<T> > {
+ typedef JSC::Weak<T> StorageType;
+
+ typedef std::nullptr_t EmptyValueType;
+ static EmptyValueType emptyValue() { return nullptr; }
+
+ typedef JSC::PassWeak<T> PassInType;
+ static void store(PassInType value, StorageType& storage) { storage = value; }
+
+ typedef JSC::PassWeak<T> PassOutType;
+ static PassOutType passOut(StorageType& value) { return value.release(); }
+ static PassOutType passOut(EmptyValueType) { return PassOutType(); }
+
+ typedef T* PeekType;
+ static PeekType peek(const StorageType& value) { return value.get(); }
+ static PeekType peek(EmptyValueType) { return PeekType(); }
+};
+
+} // namespace WTF
+
+#endif // WeakInlines_h
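
The VectorTraits and HashTraits specializations are what let Weak<T> sit directly in WTF containers, and weakAdd/weakRemove are the table-mutation helpers the comment above motivates. A hypothetical sketch of the resulting map pattern (WeakMap, cacheWrapper and the void* key are illustrative names; assumes <wtf/HashMap.h>, the JSC types, and the usual using-directives):

typedef HashMap<void*, Weak<JSObject> > WeakMap;

void cacheWrapper(WeakMap& map, void* key, PassWeak<JSObject> wrapper)
{
    // The Weak value is fully constructed before the table is touched, so a
    // finalizer that runs during its allocation cannot invalidate an iterator.
    weakAdd(map, key, wrapper);
}

JSObject* cachedWrapper(WeakMap& map, void* key)
{
    return map.get(key); // PeekType is T*: 0 when absent or already collected
}
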
diff --git a/Source/JavaScriptCore/heap/WeakSet.cpp b/Source/JavaScriptCore/heap/WeakSet.cpp
index 7cedaee85..e62e66eae 100644
--- a/Source/JavaScriptCore/heap/WeakSet.cpp
+++ b/Source/JavaScriptCore/heap/WeakSet.cpp
@@ -27,7 +27,7 @@
#include "WeakSet.h"
#include "Heap.h"
-#include "JSGlobalData.h"
+#include "VM.h"
namespace JSC {
diff --git a/Source/JavaScriptCore/heap/WeakSet.h b/Source/JavaScriptCore/heap/WeakSet.h
index 06514eb69..580cbe7a9 100644
--- a/Source/JavaScriptCore/heap/WeakSet.h
+++ b/Source/JavaScriptCore/heap/WeakSet.h
@@ -38,12 +38,12 @@ public:
static WeakImpl* allocate(JSValue, WeakHandleOwner* = 0, void* context = 0);
static void deallocate(WeakImpl*);
- WeakSet(JSGlobalData*);
+ WeakSet(VM*);
~WeakSet();
void lastChanceToFinalize();
Heap* heap() const;
- JSGlobalData* globalData() const;
+ VM* vm() const;
bool isEmpty() const;
@@ -62,19 +62,19 @@ private:
WeakBlock::FreeCell* m_allocator;
WeakBlock* m_nextAllocator;
DoublyLinkedList<WeakBlock> m_blocks;
- JSGlobalData* m_globalData;
+ VM* m_vm;
};
-inline WeakSet::WeakSet(JSGlobalData* globalData)
+inline WeakSet::WeakSet(VM* vm)
: m_allocator(0)
, m_nextAllocator(0)
- , m_globalData(globalData)
+ , m_vm(vm)
{
}
-inline JSGlobalData* WeakSet::globalData() const
+inline VM* WeakSet::vm() const
{
- return m_globalData;
+ return m_vm;
}
inline bool WeakSet::isEmpty() const
diff --git a/Source/JavaScriptCore/heap/WeakSetInlines.h b/Source/JavaScriptCore/heap/WeakSetInlines.h
index 76337fda7..f23922493 100644
--- a/Source/JavaScriptCore/heap/WeakSetInlines.h
+++ b/Source/JavaScriptCore/heap/WeakSetInlines.h
@@ -26,7 +26,7 @@
#ifndef WeakSetInlines_h
#define WeakSetInlines_h
-#include "WeakSet.h"
+#include "MarkedBlock.h"
namespace JSC {