X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/6fe7ccc865dc7d7541b93c5bcaf6368d2c98a174..12899fa232562c774004a3a9d7d3149944dec712:/heap/MarkedSpace.cpp diff --git a/heap/MarkedSpace.cpp b/heap/MarkedSpace.cpp index 405ed57..2bef608 100644 --- a/heap/MarkedSpace.cpp +++ b/heap/MarkedSpace.cpp @@ -21,151 +21,206 @@ #include "config.h" #include "MarkedSpace.h" +#include "IncrementalSweeper.h" #include "JSGlobalObject.h" #include "JSLock.h" #include "JSObject.h" -#include "ScopeChain.h" + namespace JSC { class Structure; +class Free { +public: + typedef MarkedBlock* ReturnType; + + enum FreeMode { FreeOrShrink, FreeAll }; + + Free(FreeMode, MarkedSpace*); + void operator()(MarkedBlock*); + ReturnType returnValue(); + +private: + FreeMode m_freeMode; + MarkedSpace* m_markedSpace; + DoublyLinkedList<MarkedBlock> m_blocks; +}; + +inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace) + : m_freeMode(freeMode) + , m_markedSpace(newSpace) +{ +} + +inline void Free::operator()(MarkedBlock* block) +{ + if (m_freeMode == FreeOrShrink) + m_markedSpace->freeOrShrinkBlock(block); + else + m_markedSpace->freeBlock(block); +} + +inline Free::ReturnType Free::returnValue() +{ + return m_blocks.head(); +} + +struct VisitWeakSet : MarkedBlock::VoidFunctor { + VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { } + void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); } +private: + HeapRootVisitor& m_heapRootVisitor; +}; + +struct ReapWeakSet : MarkedBlock::VoidFunctor { + void operator()(MarkedBlock* block) { block->reapWeakSet(); } +}; + MarkedSpace::MarkedSpace(Heap* heap) : m_heap(heap) { for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { - allocatorFor(cellSize).init(heap, this, cellSize, false); - destructorAllocatorFor(cellSize).init(heap, this, cellSize, true); + allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None); + 
normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal); + immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure); } for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { - allocatorFor(cellSize).init(heap, this, cellSize, false); - destructorAllocatorFor(cellSize).init(heap, this, cellSize, true); + allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None); + normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal); + immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure); } + + m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None); + m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal); + m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure); +} + +MarkedSpace::~MarkedSpace() +{ + Free free(Free::FreeAll, this); + forEachBlock(free); +} + +struct LastChanceToFinalize : MarkedBlock::VoidFunctor { + void operator()(MarkedBlock* block) { block->lastChanceToFinalize(); } +}; + +void MarkedSpace::lastChanceToFinalize() +{ + canonicalizeCellLivenessData(); + forEachBlock<LastChanceToFinalize>(); +} + +void MarkedSpace::sweep() +{ + m_heap->sweeper()->willFinishSweeping(); + forEachBlock<Sweep>(); } void MarkedSpace::resetAllocators() { for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { allocatorFor(cellSize).reset(); - destructorAllocatorFor(cellSize).reset(); + normalDestructorAllocatorFor(cellSize).reset(); + immortalStructureDestructorAllocatorFor(cellSize).reset(); } for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { allocatorFor(cellSize).reset(); - destructorAllocatorFor(cellSize).reset(); + normalDestructorAllocatorFor(cellSize).reset(); + immortalStructureDestructorAllocatorFor(cellSize).reset(); } + + 
m_normalSpace.largeAllocator.reset(); + m_normalDestructorSpace.largeAllocator.reset(); + m_immortalStructureDestructorSpace.largeAllocator.reset(); +} + +void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor) +{ + VisitWeakSet visitWeakSet(heapRootVisitor); + forEachBlock(visitWeakSet); +} + +void MarkedSpace::reapWeakSets() +{ + forEachBlock<ReapWeakSet>(); } void MarkedSpace::canonicalizeCellLivenessData() { for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { - allocatorFor(cellSize).zapFreeList(); - destructorAllocatorFor(cellSize).zapFreeList(); + allocatorFor(cellSize).canonicalizeCellLivenessData(); + normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); + immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); } for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { - allocatorFor(cellSize).zapFreeList(); - destructorAllocatorFor(cellSize).zapFreeList(); + allocatorFor(cellSize).canonicalizeCellLivenessData(); + normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); + immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData(); } + + m_normalSpace.largeAllocator.canonicalizeCellLivenessData(); + m_normalDestructorSpace.largeAllocator.canonicalizeCellLivenessData(); + m_immortalStructureDestructorSpace.largeAllocator.canonicalizeCellLivenessData(); } bool MarkedSpace::isPagedOut(double deadline) { for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) { - if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline)) + if (allocatorFor(cellSize).isPagedOut(deadline) + || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline) + || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline)) return true; } for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) { - if 
(allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline)) + if (allocatorFor(cellSize).isPagedOut(deadline) + || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline) + || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline)) return true; } + if (m_normalSpace.largeAllocator.isPagedOut(deadline) + || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline) + || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline)) + return true; + return false; } -void MarkedSpace::freeBlocks(MarkedBlock* head) +void MarkedSpace::freeBlock(MarkedBlock* block) { - MarkedBlock* next; - for (MarkedBlock* block = head; block; block = next) { - next = static_cast<MarkedBlock*>(block->next()); - - m_blocks.remove(block); - block->sweep(); - - m_heap->blockAllocator().deallocate(block); + block->allocator()->removeBlock(block); + m_blocks.remove(block); + if (block->capacity() == MarkedBlock::blockSize) { + m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block)); + return; } + m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block)); } -class TakeIfUnmarked { -public: - typedef MarkedBlock* ReturnType; - - TakeIfUnmarked(MarkedSpace*); - void operator()(MarkedBlock*); - ReturnType returnValue(); - -private: - MarkedSpace* m_markedSpace; - DoublyLinkedList<MarkedBlock> m_empties; -}; - -inline TakeIfUnmarked::TakeIfUnmarked(MarkedSpace* newSpace) - : m_markedSpace(newSpace) -{ -} - -inline void TakeIfUnmarked::operator()(MarkedBlock* block) +void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block) { - if (!block->markCountIsZero()) + if (!block->isEmpty()) { + block->shrink(); return; - - m_markedSpace->allocatorFor(block).removeBlock(block); - m_empties.append(block); -} - -inline TakeIfUnmarked::ReturnType TakeIfUnmarked::returnValue() -{ - return m_empties.head(); -} + } -void MarkedSpace::shrink() -{ - // We record a temporary list of empties to avoid modifying m_blocks while iterating it. 
- TakeIfUnmarked takeIfUnmarked(this); - freeBlocks(forEachBlock(takeIfUnmarked)); + freeBlock(block); } -#if ENABLE(GGC) -class GatherDirtyCells { - WTF_MAKE_NONCOPYABLE(GatherDirtyCells); -public: - typedef void* ReturnType; - - explicit GatherDirtyCells(MarkedBlock::DirtyCellVector*); - void operator()(MarkedBlock*); - ReturnType returnValue() { return 0; } - -private: - MarkedBlock::DirtyCellVector* m_dirtyCells; +struct Shrink : MarkedBlock::VoidFunctor { + void operator()(MarkedBlock* block) { block->shrink(); } }; -inline GatherDirtyCells::GatherDirtyCells(MarkedBlock::DirtyCellVector* dirtyCells) - : m_dirtyCells(dirtyCells) -{ -} - -inline void GatherDirtyCells::operator()(MarkedBlock* block) -{ - block->gatherDirtyCells(*m_dirtyCells); -} - -void MarkedSpace::gatherDirtyCells(MarkedBlock::DirtyCellVector& dirtyCells) +void MarkedSpace::shrink() { - GatherDirtyCells gatherDirtyCells(&dirtyCells); - forEachBlock(gatherDirtyCells); + Free freeOrShrink(Free::FreeOrShrink, this); + forEachBlock(freeOrShrink); } -#endif } // namespace JSC