#include "config.h"
#include "MarkedSpace.h"
+#include "DelayedReleaseScope.h"
+#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
#include "JSLock.h"
#include "JSObject.h"
-#include "ScopeChain.h"
+#include "JSCInlines.h"
namespace JSC {
class Structure;
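+// Functor for forEachBlock: with FreeOrShrink, empty blocks are freed and
+// non-empty blocks are merely shrunk; with FreeAll, every block is freed.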
+class Free {
+public:
+ typedef MarkedBlock* ReturnType;
+
+ enum FreeMode { FreeOrShrink, FreeAll };
+
+ Free(FreeMode, MarkedSpace*);
+ void operator()(MarkedBlock*);
+ ReturnType returnValue();
+
+private:
+ FreeMode m_freeMode;
+ MarkedSpace* m_markedSpace;
+ DoublyLinkedList<MarkedBlock> m_blocks;
+};
+
+inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
+ : m_freeMode(freeMode)
+ , m_markedSpace(newSpace)
+{
+}
+
+inline void Free::operator()(MarkedBlock* block)
+{
+ if (m_freeMode == FreeOrShrink)
+ m_markedSpace->freeOrShrinkBlock(block);
+ else
+ m_markedSpace->freeBlock(block);
+}
+
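+// forEachBlock expects its functor to supply a ReturnType and returnValue().
+// Nothing is ever appended to m_blocks, so the head returned here is null.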
+inline Free::ReturnType Free::returnValue()
+{
+ return m_blocks.head();
+}
+
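+// Per-block forwarding functors for the weak set operations below.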
+struct VisitWeakSet : MarkedBlock::VoidFunctor {
+ VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
+ void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
+private:
+ HeapRootVisitor& m_heapRootVisitor;
+};
+
+struct ReapWeakSet : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->reapWeakSet(); }
+};
+
MarkedSpace::MarkedSpace(Heap* heap)
: m_heap(heap)
+ , m_capacity(0)
+ , m_isIterating(false)
+ , m_currentDelayedReleaseScope(nullptr)
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
- allocatorFor(cellSize).init(heap, this, cellSize, false);
- destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
+ allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
+ normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
+ immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
- allocatorFor(cellSize).init(heap, this, cellSize, false);
- destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
+ allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
+ normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
+ immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
}
+
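+    // A cell size of 0 denotes the large allocator, which serves allocations
+    // above impreciseCutoff from custom-sized blocks.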
+ m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
+ m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
+ m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
+}
+
+MarkedSpace::~MarkedSpace()
+{
+ Free free(Free::FreeAll, this);
+ forEachBlock(free);
+ ASSERT(!m_blocks.set().size());
+}
+
+struct LastChanceToFinalize {
+ void operator()(MarkedAllocator& allocator) { allocator.lastChanceToFinalize(); }
+};
+
+void MarkedSpace::lastChanceToFinalize()
+{
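+    // Defer any releases triggered by finalization until the scope ends, when
+    // the heap is back in a consistent state.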
+ DelayedReleaseScope delayedReleaseScope(*this);
+ stopAllocating();
+ forEachAllocator<LastChanceToFinalize>();
+}
+
+void MarkedSpace::sweep()
+{
+ if (Options::logGC())
+ dataLog("Eagerly sweeping...");
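+    // Notify the incremental sweeper before sweeping every block eagerly here.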
+ m_heap->sweeper()->willFinishSweeping();
+ forEachBlock<Sweep>();
+}
+
+void MarkedSpace::zombifySweep()
+{
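+    // Used on the zombie-debugging path: sweep any blocks that still need it
+    // so that dead cells can then be poisoned ("zombified") by the heap.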
+ if (Options::logGC())
+ dataLog("Zombifying sweep...");
+ m_heap->sweeper()->willFinishSweeping();
+ forEachBlock<ZombifySweep>();
}
void MarkedSpace::resetAllocators()
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
allocatorFor(cellSize).reset();
- destructorAllocatorFor(cellSize).reset();
+ normalDestructorAllocatorFor(cellSize).reset();
+ immortalStructureDestructorAllocatorFor(cellSize).reset();
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
allocatorFor(cellSize).reset();
- destructorAllocatorFor(cellSize).reset();
+ normalDestructorAllocatorFor(cellSize).reset();
+ immortalStructureDestructorAllocatorFor(cellSize).reset();
}
+
+ m_normalSpace.largeAllocator.reset();
+ m_normalDestructorSpace.largeAllocator.reset();
+ m_immortalStructureDestructorSpace.largeAllocator.reset();
+
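+    // The list of blocks that received new objects is per-cycle state for
+    // generational collection; clear it when the allocators are reset.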
+#if ENABLE(GGC)
+ m_blocksWithNewObjects.clear();
+#endif
+}
+
+void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
+{
+ VisitWeakSet visitWeakSet(heapRootVisitor);
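+    // An Eden collection only needs to visit blocks that received new objects
+    // since the last collection; a full collection visits every block.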
+ if (m_heap->operationInProgress() == EdenCollection) {
+ for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+ visitWeakSet(m_blocksWithNewObjects[i]);
+ } else
+ forEachBlock(visitWeakSet);
+}
+
+void MarkedSpace::reapWeakSets()
+{
+ if (m_heap->operationInProgress() == EdenCollection) {
+ for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+ m_blocksWithNewObjects[i]->reapWeakSet();
+ } else
+ forEachBlock<ReapWeakSet>();
+}
+
+template <typename Functor>
+void MarkedSpace::forEachAllocator()
+{
+ Functor functor;
+ forEachAllocator(functor);
}
-void MarkedSpace::canonicalizeCellLivenessData()
+template <typename Functor>
+void MarkedSpace::forEachAllocator(Functor& functor)
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
- allocatorFor(cellSize).zapFreeList();
- destructorAllocatorFor(cellSize).zapFreeList();
+ functor(allocatorFor(cellSize));
+ functor(normalDestructorAllocatorFor(cellSize));
+ functor(immortalStructureDestructorAllocatorFor(cellSize));
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
- allocatorFor(cellSize).zapFreeList();
- destructorAllocatorFor(cellSize).zapFreeList();
+ functor(allocatorFor(cellSize));
+ functor(normalDestructorAllocatorFor(cellSize));
+ functor(immortalStructureDestructorAllocatorFor(cellSize));
}
+
+ functor(m_normalSpace.largeAllocator);
+ functor(m_normalDestructorSpace.largeAllocator);
+ functor(m_immortalStructureDestructorSpace.largeAllocator);
+}
+
+struct StopAllocatingFunctor {
+ void operator()(MarkedAllocator& allocator) { allocator.stopAllocating(); }
+};
+
+void MarkedSpace::stopAllocating()
+{
+ ASSERT(!isIterating());
+ forEachAllocator<StopAllocatingFunctor>();
+}
+
+struct ResumeAllocatingFunctor {
+ void operator()(MarkedAllocator& allocator) { allocator.resumeAllocating(); }
+};
+
+void MarkedSpace::resumeAllocating()
+{
+ ASSERT(isIterating());
+ forEachAllocator<ResumeAllocatingFunctor>();
}
bool MarkedSpace::isPagedOut(double deadline)
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
- if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+ if (allocatorFor(cellSize).isPagedOut(deadline)
+ || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
+ || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
return true;
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
- if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+ if (allocatorFor(cellSize).isPagedOut(deadline)
+ || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
+ || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
return true;
}
+ if (m_normalSpace.largeAllocator.isPagedOut(deadline)
+ || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
+ || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
+ return true;
+
return false;
}
-void MarkedSpace::freeBlocks(MarkedBlock* head)
+void MarkedSpace::freeBlock(MarkedBlock* block)
{
- MarkedBlock* next;
- for (MarkedBlock* block = head; block; block = next) {
- next = static_cast<MarkedBlock*>(block->next());
-
- m_blocks.remove(block);
- block->sweep();
+ block->allocator()->removeBlock(block);
+ m_capacity -= block->capacity();
+ m_blocks.remove(block);
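+    // Normal-sized blocks go back to the shared block allocator; oversize
+    // blocks were allocated with a custom size and take the custom-size path.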
+ if (block->capacity() == MarkedBlock::blockSize) {
+ m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
+ return;
+ }
+ m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
+}
- m_heap->blockAllocator().deallocate(block);
+void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
+{
+ if (!block->isEmpty()) {
+ block->shrink();
+ return;
}
+
+ freeBlock(block);
}
-class TakeIfUnmarked {
-public:
- typedef MarkedBlock* ReturnType;
-
- TakeIfUnmarked(MarkedSpace*);
- void operator()(MarkedBlock*);
- ReturnType returnValue();
-
-private:
- MarkedSpace* m_markedSpace;
- DoublyLinkedList<MarkedBlock> m_empties;
+struct Shrink : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->shrink(); }
};
-inline TakeIfUnmarked::TakeIfUnmarked(MarkedSpace* newSpace)
- : m_markedSpace(newSpace)
+void MarkedSpace::shrink()
{
+ Free freeOrShrink(Free::FreeOrShrink, this);
+ forEachBlock(freeOrShrink);
}
-inline void TakeIfUnmarked::operator()(MarkedBlock* block)
+static void clearNewlyAllocatedInBlock(MarkedBlock* block)
{
- if (!block->markCountIsZero())
+ if (!block)
return;
-
- m_markedSpace->allocatorFor(block).removeBlock(block);
- m_empties.append(block);
+ block->clearNewlyAllocated();
}
-inline TakeIfUnmarked::ReturnType TakeIfUnmarked::returnValue()
-{
- return m_empties.head();
-}
+struct ClearNewlyAllocated : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->clearNewlyAllocated(); }
+};
-void MarkedSpace::shrink()
+#ifndef NDEBUG
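+// MarkedBlock::clearNewlyAllocated() reports whether a bitmap was still
+// present; after MarkedSpace::clearNewlyAllocated() no block should have one.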
+struct VerifyNewlyAllocated : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { ASSERT(!block->clearNewlyAllocated()); }
+};
+#endif
+
+void MarkedSpace::clearNewlyAllocated()
{
- // We record a temporary list of empties to avoid modifying m_blocks while iterating it.
- TakeIfUnmarked takeIfUnmarked(this);
- freeBlocks(forEachBlock(takeIfUnmarked));
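+    // For the precise and imprecise allocators, only the last active block of
+    // each allocator can still carry a newlyAllocated bitmap.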
+ for (size_t i = 0; i < preciseCount; ++i) {
+ clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_normalDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
+ }
+
+ for (size_t i = 0; i < impreciseCount; ++i) {
+ clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_normalDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
+ }
+
+    // We have to iterate all of the blocks in the large allocators because
+    // they are canonicalized as they are used up (see
+    // MarkedAllocator::tryAllocateHelper), and that canonicalization is what
+    // creates the m_newlyAllocated bitmap.
+ ClearNewlyAllocated functor;
+ m_normalSpace.largeAllocator.forEachBlock(functor);
+ m_normalDestructorSpace.largeAllocator.forEachBlock(functor);
+ m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor);
+
+#ifndef NDEBUG
+ VerifyNewlyAllocated verifyFunctor;
+ forEachBlock(verifyFunctor);
+#endif
}
-#if ENABLE(GGC)
-class GatherDirtyCells {
- WTF_MAKE_NONCOPYABLE(GatherDirtyCells);
-public:
- typedef void* ReturnType;
-
- explicit GatherDirtyCells(MarkedBlock::DirtyCellVector*);
- void operator()(MarkedBlock*);
- ReturnType returnValue() { return 0; }
-
-private:
- MarkedBlock::DirtyCellVector* m_dirtyCells;
-};
+#ifndef NDEBUG
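+// Debug check run at the end of clearMarks(): every block should be left in
+// either the Marked or the Retired state.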
+struct VerifyMarkedOrRetired : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block)
+ {
+ switch (block->m_state) {
+ case MarkedBlock::Marked:
+ case MarkedBlock::Retired:
+ return;
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ }
+ }
+};
+#endif
-inline GatherDirtyCells::GatherDirtyCells(MarkedBlock::DirtyCellVector* dirtyCells)
- : m_dirtyCells(dirtyCells)
+void MarkedSpace::clearMarks()
{
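+    // An Eden collection preserves the marks of old objects; only blocks that
+    // received new objects need their marks cleared.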
+ if (m_heap->operationInProgress() == EdenCollection) {
+ for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+ m_blocksWithNewObjects[i]->clearMarks();
+ } else
+ forEachBlock<ClearMarks>();
+
+#ifndef NDEBUG
+ VerifyMarkedOrRetired verifyFunctor;
+ forEachBlock(verifyFunctor);
+#endif
}
-inline void GatherDirtyCells::operator()(MarkedBlock* block)
+void MarkedSpace::willStartIterating()
{
- block->gatherDirtyCells(*m_dirtyCells);
+ ASSERT(!isIterating());
+ stopAllocating();
+ m_isIterating = true;
}
-void MarkedSpace::gatherDirtyCells(MarkedBlock::DirtyCellVector& dirtyCells)
+void MarkedSpace::didFinishIterating()
{
- GatherDirtyCells gatherDirtyCells(&dirtyCells);
- forEachBlock(gatherDirtyCells);
+ ASSERT(isIterating());
+ DelayedReleaseScope scope(*this);
+ resumeAllocating();
+ m_isIterating = false;
}
-#endif
} // namespace JSC