X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/2d39b0e377c0896910ee49ae70082ba665faf986..ed1e77d3adeb83d26fd1dfb16dd84cabdcefd250:/heap/MarkedSpace.cpp

diff --git a/heap/MarkedSpace.cpp b/heap/MarkedSpace.cpp
index 8a8fbf6..4f30890 100644
--- a/heap/MarkedSpace.cpp
+++ b/heap/MarkedSpace.cpp
@@ -21,7 +21,6 @@
 #include "config.h"
 #include "MarkedSpace.h"
 
-#include "DelayedReleaseScope.h"
 #include "IncrementalSweeper.h"
 #include "JSGlobalObject.h"
 #include "JSLock.h"
@@ -82,23 +81,19 @@ MarkedSpace::MarkedSpace(Heap* heap)
     : m_heap(heap)
     , m_capacity(0)
     , m_isIterating(false)
-    , m_currentDelayedReleaseScope(nullptr)
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
-        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
-        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
-        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
+        allocatorFor(cellSize).init(heap, this, cellSize, false);
+        destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
     }
 
     for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
-        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
-        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
-        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
+        allocatorFor(cellSize).init(heap, this, cellSize, false);
+        destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
     }
 
-    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
-    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
-    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
+    m_normalSpace.largeAllocator.init(heap, this, 0, false);
+    m_destructorSpace.largeAllocator.init(heap, this, 0, true);
 }
 
 MarkedSpace::~MarkedSpace()
@@ -114,15 +109,12 @@ struct LastChanceToFinalize {
 
 void MarkedSpace::lastChanceToFinalize()
 {
-    DelayedReleaseScope delayedReleaseScope(*this);
     stopAllocating();
     forEachAllocator<LastChanceToFinalize>();
 }
 
 void MarkedSpace::sweep()
 {
-    if (Options::logGC())
-        dataLog("Eagerly sweeping...");
     m_heap->sweeper()->willFinishSweeping();
     forEachBlock<Sweep>();
 }
@@ -139,19 +131,16 @@ void MarkedSpace::resetAllocators()
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
         allocatorFor(cellSize).reset();
-        normalDestructorAllocatorFor(cellSize).reset();
-        immortalStructureDestructorAllocatorFor(cellSize).reset();
+        destructorAllocatorFor(cellSize).reset();
     }
 
     for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
         allocatorFor(cellSize).reset();
-        normalDestructorAllocatorFor(cellSize).reset();
-        immortalStructureDestructorAllocatorFor(cellSize).reset();
+        destructorAllocatorFor(cellSize).reset();
     }
 
     m_normalSpace.largeAllocator.reset();
-    m_normalDestructorSpace.largeAllocator.reset();
-    m_immortalStructureDestructorSpace.largeAllocator.reset();
+    m_destructorSpace.largeAllocator.reset();
 
 #if ENABLE(GGC)
     m_blocksWithNewObjects.clear();
@@ -189,19 +178,16 @@ void MarkedSpace::forEachAllocator(Functor& functor)
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
         functor(allocatorFor(cellSize));
-        functor(normalDestructorAllocatorFor(cellSize));
-        functor(immortalStructureDestructorAllocatorFor(cellSize));
+        functor(destructorAllocatorFor(cellSize));
     }
 
     for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
         functor(allocatorFor(cellSize));
-        functor(normalDestructorAllocatorFor(cellSize));
-        functor(immortalStructureDestructorAllocatorFor(cellSize));
+        functor(destructorAllocatorFor(cellSize));
     }
 
     functor(m_normalSpace.largeAllocator);
-    functor(m_normalDestructorSpace.largeAllocator);
-    functor(m_immortalStructureDestructorSpace.largeAllocator);
+    functor(m_destructorSpace.largeAllocator);
 }
 
 struct StopAllocatingFunctor {
@@ -228,21 +214,18 @@ bool MarkedSpace::isPagedOut(double deadline)
 {
     for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
         if (allocatorFor(cellSize).isPagedOut(deadline)
-            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
-            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
+            || destructorAllocatorFor(cellSize).isPagedOut(deadline))
             return true;
     }
 
     for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
         if (allocatorFor(cellSize).isPagedOut(deadline)
-            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
-            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
+            || destructorAllocatorFor(cellSize).isPagedOut(deadline))
             return true;
     }
 
     if (m_normalSpace.largeAllocator.isPagedOut(deadline)
-        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
-        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
+        || m_destructorSpace.largeAllocator.isPagedOut(deadline))
         return true;
 
     return false;
@@ -253,11 +236,7 @@ void MarkedSpace::freeBlock(MarkedBlock* block)
     block->allocator()->removeBlock(block);
     m_capacity -= block->capacity();
     m_blocks.remove(block);
-    if (block->capacity() == MarkedBlock::blockSize) {
-        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
-        return;
-    }
-    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
+    MarkedBlock::destroy(block);
 }
 
 void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
@@ -301,14 +280,12 @@ void MarkedSpace::clearNewlyAllocated()
 {
     for (size_t i = 0; i < preciseCount; ++i) {
         clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
-        clearNewlyAllocatedInBlock(m_normalDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
-        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
+        clearNewlyAllocatedInBlock(m_destructorSpace.preciseAllocators[i].takeLastActiveBlock());
     }
 
     for (size_t i = 0; i < impreciseCount; ++i) {
         clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
-        clearNewlyAllocatedInBlock(m_normalDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
-        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
+        clearNewlyAllocatedInBlock(m_destructorSpace.impreciseAllocators[i].takeLastActiveBlock());
    }
 
     // We have to iterate all of the blocks in the large allocators because they are
@@ -316,8 +293,7 @@
     // which creates the m_newlyAllocated bitmap.
     ClearNewlyAllocated functor;
     m_normalSpace.largeAllocator.forEachBlock(functor);
-    m_normalDestructorSpace.largeAllocator.forEachBlock(functor);
-    m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor);
+    m_destructorSpace.largeAllocator.forEachBlock(functor);
 
 #ifndef NDEBUG
     VerifyNewlyAllocated verifyFunctor;
@@ -364,7 +340,6 @@ void MarkedSpace::willStartIterating()
 void MarkedSpace::didFinishIterating()
 {
     ASSERT(isIterating());
-    DelayedReleaseScope scope(*this);
     resumeAllocating();
     m_isIterating = false;
 }
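
For readers skimming the diff: it removes DelayedReleaseScope entirely, collapses the former three-way MarkedBlock::None / Normal / ImmortalStructure allocator split into a two-way normal/destructor split selected by a single bool, and lets freeBlock() call MarkedBlock::destroy(block) directly instead of routing through the heap's block allocator. Below is a minimal, self-contained C++ sketch, not JavaScriptCore source, of the allocator layout the rewritten constructor implies: two parallel banks of per-size-class allocators plus a large-object catch-all. The size constants, the index arithmetic in pick(), and the Allocator/Subspace/Space types here are illustrative assumptions; only the overall shape (each size class initialised once per subspace, with a bool choosing destructor behaviour) is taken from the diff.

#include <array>
#include <cstddef>
#include <cstdio>

namespace sketch {

// Illustrative size-class boundaries; the real preciseStep/preciseCutoff/
// impreciseStep/impreciseCutoff values live in MarkedSpace.h and differ.
constexpr std::size_t preciseStep = 16;
constexpr std::size_t preciseCutoff = 128;
constexpr std::size_t impreciseStep = 256;
constexpr std::size_t impreciseCutoff = 1024;

struct Allocator {
    std::size_t cellSize = 0;
    bool needsDestruction = false;
    void init(std::size_t size, bool destructor) { cellSize = size; needsDestruction = destructor; }
};

// One bank of allocators per destruction behaviour, mirroring the diff's
// m_normalSpace / m_destructorSpace pair (cellSize 0 marks the large catch-all).
struct Subspace {
    std::array<Allocator, preciseCutoff / preciseStep> preciseAllocators;
    std::array<Allocator, impreciseCutoff / impreciseStep> impreciseAllocators;
    Allocator largeAllocator;
};

class Space {
public:
    Space()
    {
        // Same shape as the rewritten constructor: every size class is set up twice,
        // once per subspace, and a single bool now selects destructor behaviour.
        for (std::size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
            allocatorFor(cellSize).init(cellSize, false);
            destructorAllocatorFor(cellSize).init(cellSize, true);
        }
        for (std::size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
            allocatorFor(cellSize).init(cellSize, false);
            destructorAllocatorFor(cellSize).init(cellSize, true);
        }
        m_normalSpace.largeAllocator.init(0, false);
        m_destructorSpace.largeAllocator.init(0, true);
    }

    // Requests round up to the next size class; oversized requests fall through
    // to the subspace's large allocator.
    Allocator& allocatorFor(std::size_t bytes) { return pick(m_normalSpace, bytes); }
    Allocator& destructorAllocatorFor(std::size_t bytes) { return pick(m_destructorSpace, bytes); }

private:
    static Allocator& pick(Subspace& space, std::size_t bytes)
    {
        if (bytes && bytes <= preciseCutoff)
            return space.preciseAllocators[(bytes - 1) / preciseStep];
        if (bytes && bytes <= impreciseCutoff)
            return space.impreciseAllocators[(bytes - 1) / impreciseStep];
        return space.largeAllocator;
    }

    Subspace m_normalSpace;
    Subspace m_destructorSpace;
};

} // namespace sketch

int main()
{
    sketch::Space space;
    // A 40-byte cell without a destructor and one that needs destruction land in
    // parallel 48-byte size classes, one per subspace.
    std::printf("normal: %zu bytes\n", space.allocatorFor(40).cellSize);
    std::printf("destructor: %zu bytes, needsDestruction=%d\n",
        space.destructorAllocatorFor(40).cellSize,
        static_cast<int>(space.destructorAllocatorFor(40).needsDestruction));
    return 0;
}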