X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/6fe7ccc865dc7d7541b93c5bcaf6368d2c98a174..ed1e77d3adeb83d26fd1dfb16dd84cabdcefd250:/heap/MarkedSpace.cpp?ds=sidebyside

diff --git a/heap/MarkedSpace.cpp b/heap/MarkedSpace.cpp
index 405ed57..4f30890 100644
--- a/heap/MarkedSpace.cpp
+++ b/heap/MarkedSpace.cpp
@@ -21,17 +21,66 @@
 #include "config.h"
 #include "MarkedSpace.h"
 
+#include "IncrementalSweeper.h"
 #include "JSGlobalObject.h"
 #include "JSLock.h"
 #include "JSObject.h"
-#include "ScopeChain.h"
+#include "JSCInlines.h"
 
 namespace JSC {
 
 class Structure;
 
+class Free {
+public:
+    typedef MarkedBlock* ReturnType;
+
+    enum FreeMode { FreeOrShrink, FreeAll };
+
+    Free(FreeMode, MarkedSpace*);
+    void operator()(MarkedBlock*);
+    ReturnType returnValue();
+
+private:
+    FreeMode m_freeMode;
+    MarkedSpace* m_markedSpace;
+    DoublyLinkedList<MarkedBlock> m_blocks;
+};
+
+inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
+    : m_freeMode(freeMode)
+    , m_markedSpace(newSpace)
+{
+}
+
+inline void Free::operator()(MarkedBlock* block)
+{
+    if (m_freeMode == FreeOrShrink)
+        m_markedSpace->freeOrShrinkBlock(block);
+    else
+        m_markedSpace->freeBlock(block);
+}
+
+inline Free::ReturnType Free::returnValue()
+{
+    return m_blocks.head();
+}
+
+struct VisitWeakSet : MarkedBlock::VoidFunctor {
+    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
+    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
+private:
+    HeapRootVisitor& m_heapRootVisitor;
+};
+
+struct ReapWeakSet : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
+};
+
 MarkedSpace::MarkedSpace(Heap* heap)
     : m_heap(heap)
+    , m_capacity(0)
+    , m_isIterating(false)
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
         allocatorFor(cellSize).init(heap, this, cellSize, false);
@@ -42,6 +91,40 @@ MarkedSpace::MarkedSpace(Heap* heap)
         allocatorFor(cellSize).init(heap, this, cellSize, false);
         destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
     }
+
+    m_normalSpace.largeAllocator.init(heap, this, 0, false);
+    m_destructorSpace.largeAllocator.init(heap, this, 0, true);
+}
+
+MarkedSpace::~MarkedSpace()
+{
+    Free free(Free::FreeAll, this);
+    forEachBlock(free);
+    ASSERT(!m_blocks.set().size());
+}
+
+struct LastChanceToFinalize {
+    void operator()(MarkedAllocator& allocator) { allocator.lastChanceToFinalize(); }
+};
+
+void MarkedSpace::lastChanceToFinalize()
+{
+    stopAllocating();
+    forEachAllocator<LastChanceToFinalize>();
+}
+
+void MarkedSpace::sweep()
+{
+    m_heap->sweeper()->willFinishSweeping();
+    forEachBlock<Sweep>();
+}
+
+void MarkedSpace::zombifySweep()
+{
+    if (Options::logGC())
+        dataLog("Zombifying sweep...");
+    m_heap->sweeper()->willFinishSweeping();
+    forEachBlock<ZombifySweep>();
 }
 
 void MarkedSpace::resetAllocators()
@@ -55,117 +138,210 @@ void MarkedSpace::resetAllocators()
         allocatorFor(cellSize).reset();
         destructorAllocatorFor(cellSize).reset();
     }
+
+    m_normalSpace.largeAllocator.reset();
+    m_destructorSpace.largeAllocator.reset();
+
+#if ENABLE(GGC)
+    m_blocksWithNewObjects.clear();
+#endif
 }
 
-void MarkedSpace::canonicalizeCellLivenessData()
+void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
+{
+    VisitWeakSet visitWeakSet(heapRootVisitor);
+    if (m_heap->operationInProgress() == EdenCollection) {
+        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+            visitWeakSet(m_blocksWithNewObjects[i]);
+    } else
+        forEachBlock(visitWeakSet);
+}
+
+void MarkedSpace::reapWeakSets()
+{
+    if (m_heap->operationInProgress() == EdenCollection) {
+        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+            m_blocksWithNewObjects[i]->reapWeakSet();
+    } else
+        forEachBlock<ReapWeakSet>();
+}
+
+template <typename Functor>
+void MarkedSpace::forEachAllocator()
+{
+    Functor functor;
+    forEachAllocator(functor);
+}
+
+template <typename Functor>
+void MarkedSpace::forEachAllocator(Functor& functor)
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
-        allocatorFor(cellSize).zapFreeList();
-        destructorAllocatorFor(cellSize).zapFreeList();
+        functor(allocatorFor(cellSize));
+        functor(destructorAllocatorFor(cellSize));
     }
 
     for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
-        allocatorFor(cellSize).zapFreeList();
-        destructorAllocatorFor(cellSize).zapFreeList();
+        functor(allocatorFor(cellSize));
+        functor(destructorAllocatorFor(cellSize));
     }
+
+    functor(m_normalSpace.largeAllocator);
+    functor(m_destructorSpace.largeAllocator);
+}
+
+struct StopAllocatingFunctor {
+    void operator()(MarkedAllocator& allocator) { allocator.stopAllocating(); }
+};
+
+void MarkedSpace::stopAllocating()
+{
+    ASSERT(!isIterating());
+    forEachAllocator<StopAllocatingFunctor>();
+}
+
+struct ResumeAllocatingFunctor {
+    void operator()(MarkedAllocator& allocator) { allocator.resumeAllocating(); }
+};
+
+void MarkedSpace::resumeAllocating()
+{
+    ASSERT(isIterating());
+    forEachAllocator<ResumeAllocatingFunctor>();
 }
 
 bool MarkedSpace::isPagedOut(double deadline)
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
-        if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+        if (allocatorFor(cellSize).isPagedOut(deadline)
+            || destructorAllocatorFor(cellSize).isPagedOut(deadline))
             return true;
     }
 
     for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
-        if (allocatorFor(cellSize).isPagedOut(deadline) || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+        if (allocatorFor(cellSize).isPagedOut(deadline)
+            || destructorAllocatorFor(cellSize).isPagedOut(deadline))
             return true;
     }
 
+    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
+        || m_destructorSpace.largeAllocator.isPagedOut(deadline))
+        return true;
+
     return false;
 }
 
-void MarkedSpace::freeBlocks(MarkedBlock* head)
+void MarkedSpace::freeBlock(MarkedBlock* block)
 {
-    MarkedBlock* next;
-    for (MarkedBlock* block = head; block; block = next) {
-        next = static_cast<MarkedBlock*>(block->next());
-
-        m_blocks.remove(block);
-        block->sweep();
+    block->allocator()->removeBlock(block);
+    m_capacity -= block->capacity();
+    m_blocks.remove(block);
+    MarkedBlock::destroy(block);
+}
 
-        m_heap->blockAllocator().deallocate(block);
+void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
+{
+    if (!block->isEmpty()) {
+        block->shrink();
+        return;
     }
+
+    freeBlock(block);
 }
 
-class TakeIfUnmarked {
-public:
-    typedef MarkedBlock* ReturnType;
-
-    TakeIfUnmarked(MarkedSpace*);
-    void operator()(MarkedBlock*);
-    ReturnType returnValue();
-
-private:
-    MarkedSpace* m_markedSpace;
-    DoublyLinkedList<MarkedBlock> m_empties;
+struct Shrink : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { block->shrink(); }
 };
 
-inline TakeIfUnmarked::TakeIfUnmarked(MarkedSpace* newSpace)
-    : m_markedSpace(newSpace)
+void MarkedSpace::shrink()
 {
+    Free freeOrShrink(Free::FreeOrShrink, this);
+    forEachBlock(freeOrShrink);
 }
 
-inline void TakeIfUnmarked::operator()(MarkedBlock* block)
+static void clearNewlyAllocatedInBlock(MarkedBlock* block)
 {
-    if (!block->markCountIsZero())
+    if (!block)
         return;
-
-    m_markedSpace->allocatorFor(block).removeBlock(block);
-    m_empties.append(block);
+    block->clearNewlyAllocated();
 }
 
-inline TakeIfUnmarked::ReturnType TakeIfUnmarked::returnValue()
-{
-    return m_empties.head();
-}
+struct ClearNewlyAllocated : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { block->clearNewlyAllocated(); }
+};
 
-void MarkedSpace::shrink()
+#ifndef NDEBUG
+struct VerifyNewlyAllocated : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { ASSERT(!block->clearNewlyAllocated()); }
+};
+#endif
+
+void MarkedSpace::clearNewlyAllocated()
 {
-    // We record a temporary list of empties to avoid modifying m_blocks while iterating it.
-    TakeIfUnmarked takeIfUnmarked(this);
-    freeBlocks(forEachBlock(takeIfUnmarked));
+    for (size_t i = 0; i < preciseCount; ++i) {
+        clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
+        clearNewlyAllocatedInBlock(m_destructorSpace.preciseAllocators[i].takeLastActiveBlock());
+    }
+
+    for (size_t i = 0; i < impreciseCount; ++i) {
+        clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
+        clearNewlyAllocatedInBlock(m_destructorSpace.impreciseAllocators[i].takeLastActiveBlock());
+    }
+
+    // We have to iterate all of the blocks in the large allocators because they are
+    // canonicalized as they are used up (see MarkedAllocator::tryAllocateHelper)
+    // which creates the m_newlyAllocated bitmap.
+    ClearNewlyAllocated functor;
+    m_normalSpace.largeAllocator.forEachBlock(functor);
+    m_destructorSpace.largeAllocator.forEachBlock(functor);
+
+#ifndef NDEBUG
+    VerifyNewlyAllocated verifyFunctor;
+    forEachBlock(verifyFunctor);
+#endif
 }
 
-#if ENABLE(GGC)
-class GatherDirtyCells {
-    WTF_MAKE_NONCOPYABLE(GatherDirtyCells);
-public:
-    typedef void* ReturnType;
-
-    explicit GatherDirtyCells(MarkedBlock::DirtyCellVector*);
-    void operator()(MarkedBlock*);
-    ReturnType returnValue() { return 0; }
-
-private:
-    MarkedBlock::DirtyCellVector* m_dirtyCells;
-};
+#ifndef NDEBUG
+struct VerifyMarkedOrRetired : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block)
+    {
+        switch (block->m_state) {
+        case MarkedBlock::Marked:
+        case MarkedBlock::Retired:
+            return;
+        default:
+            RELEASE_ASSERT_NOT_REACHED();
+        }
+    }
+};
+#endif
 
-inline GatherDirtyCells::GatherDirtyCells(MarkedBlock::DirtyCellVector* dirtyCells)
-    : m_dirtyCells(dirtyCells)
+void MarkedSpace::clearMarks()
 {
+    if (m_heap->operationInProgress() == EdenCollection) {
+        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+            m_blocksWithNewObjects[i]->clearMarks();
+    } else
+        forEachBlock<ClearMarks>();
+
+#ifndef NDEBUG
+    VerifyMarkedOrRetired verifyFunctor;
+    forEachBlock(verifyFunctor);
+#endif
 }
 
-inline void GatherDirtyCells::operator()(MarkedBlock* block)
+void MarkedSpace::willStartIterating()
 {
-    block->gatherDirtyCells(*m_dirtyCells);
+    ASSERT(!isIterating());
+    stopAllocating();
+    m_isIterating = true;
 }
 
-void MarkedSpace::gatherDirtyCells(MarkedBlock::DirtyCellVector& dirtyCells)
+void MarkedSpace::didFinishIterating()
 {
-    GatherDirtyCells gatherDirtyCells(&dirtyCells);
-    forEachBlock(gatherDirtyCells);
+    ASSERT(isIterating());
+    resumeAllocating();
+    m_isIterating = false;
 }
-#endif
 
 } // namespace JSC
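The central refactoring in this diff is the replacement of hand-rolled loops (and the old zapFreeList calls) with small functor types applied uniformly by forEachAllocator/forEachBlock, so that the per-size-class allocators and the new largeAllocator are all visited through one template. The following sketch illustrates that traversal pattern in isolation; Space, Allocator, and StopEverything are simplified stand-ins invented for this note, not the real JSC classes.

#include <cstdio>
#include <vector>

// Simplified stand-in for MarkedAllocator, which really carries a free
// list, a block list, and a cell size.
struct Allocator {
    bool stopped = false;
    void stopAllocating() { stopped = true; }
};

class Space {
public:
    Space() : m_allocators(4) { }

    // Reference-taking overload, as in the patch: visit every size-class
    // allocator plus the large allocator with a single functor.
    template <typename Functor>
    void forEachAllocator(Functor& functor)
    {
        for (Allocator& allocator : m_allocators)
            functor(allocator);
        functor(m_largeAllocator);
    }

    // Zero-argument overload: default-construct the functor, then reuse
    // the overload above.
    template <typename Functor>
    void forEachAllocator()
    {
        Functor functor;
        forEachAllocator(functor);
    }

private:
    std::vector<Allocator> m_allocators;
    Allocator m_largeAllocator;
};

// A stateless functor in the style of the patch's StopAllocatingFunctor.
struct StopEverything {
    void operator()(Allocator& allocator) { allocator.stopAllocating(); }
};

int main()
{
    Space space;
    space.forEachAllocator<StopEverything>();
    std::puts("all five allocators stopped");
}

The two overloads serve different callers: the zero-argument form keeps call sites like forEachAllocator<StopAllocatingFunctor>() terse for stateless functors, while the reference-taking form supports stateful functors such as VisitWeakSet, which must carry a HeapRootVisitor.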
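A second pattern recurs across visitWeakSets, reapWeakSets, and clearMarks: during an EdenCollection only the blocks recorded in m_blocksWithNewObjects are touched, while a full collection walks every block. Below is a minimal sketch of that filtering idea, assuming hypothetical Block and CollectionScope stand-ins rather than the real MarkedBlock API.

#include <cstdio>
#include <vector>

enum class CollectionScope { Eden, Full };

// Hypothetical stand-in for MarkedBlock, which really owns mark bits and
// a weak set.
struct Block {
    void reapWeakSet() { std::puts("reaped one block's weak set"); }
};

class Space {
public:
    Space() : m_blocks(8) { }

    // Record a block that received new objects since the last GC, in the
    // spirit of m_blocksWithNewObjects.
    void didAllocateIn(Block* block) { m_blocksWithNewObjects.push_back(block); }

    // Eden (young-generation) collections visit only the recorded blocks;
    // full collections visit everything.
    void reapWeakSets(CollectionScope scope)
    {
        if (scope == CollectionScope::Eden) {
            for (Block* block : m_blocksWithNewObjects)
                block->reapWeakSet();
        } else {
            for (Block& block : m_blocks)
                block.reapWeakSet();
        }
    }

    std::vector<Block> m_blocks;
    std::vector<Block*> m_blocksWithNewObjects;
};

int main()
{
    Space space;
    space.didAllocateIn(&space.m_blocks[0]);
    space.reapWeakSets(CollectionScope::Eden); // visits the 1 recorded block
    space.reapWeakSets(CollectionScope::Full); // visits all 8 blocks
}

This is what makes Eden collections cheap: their cost scales with the number of blocks that actually received new objects, not with the total size of the heap.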