X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/14957cd040308e3eeec43d26bae5d76da13fcd85..8b637bb680022adfddad653280734877951535a9:/heap/MarkedSpace.cpp

diff --git a/heap/MarkedSpace.cpp b/heap/MarkedSpace.cpp
index 77f6e52..2bef608 100644
--- a/heap/MarkedSpace.cpp
+++ b/heap/MarkedSpace.cpp
@@ -21,147 +21,206 @@
 #include "config.h"
 #include "MarkedSpace.h"
 
+#include "IncrementalSweeper.h"
 #include "JSGlobalObject.h"
-#include "JSCell.h"
-#include "JSGlobalData.h"
 #include "JSLock.h"
 #include "JSObject.h"
-#include "ScopeChain.h"
+
 
 namespace JSC {
 
 class Structure;
 
-MarkedSpace::MarkedSpace(JSGlobalData* globalData)
-    : m_waterMark(0)
-    , m_highWaterMark(0)
-    , m_globalData(globalData)
-{
-    for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
-        sizeClassFor(cellSize).cellSize = cellSize;
+class Free {
+public:
+    typedef MarkedBlock* ReturnType;
 
-    for (size_t cellSize = impreciseStep; cellSize < impreciseCutoff; cellSize += impreciseStep)
-        sizeClassFor(cellSize).cellSize = cellSize;
-}
+    enum FreeMode { FreeOrShrink, FreeAll };
 
-void MarkedSpace::destroy()
+    Free(FreeMode, MarkedSpace*);
+    void operator()(MarkedBlock*);
+    ReturnType returnValue();
+
+private:
+    FreeMode m_freeMode;
+    MarkedSpace* m_markedSpace;
+    DoublyLinkedList<MarkedBlock> m_blocks;
+};
+
+inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
+    : m_freeMode(freeMode)
+    , m_markedSpace(newSpace)
 {
-    clearMarks();
-    shrink();
-    ASSERT(!size());
 }
 
-MarkedBlock* MarkedSpace::allocateBlock(SizeClass& sizeClass)
+inline void Free::operator()(MarkedBlock* block)
 {
-    MarkedBlock* block = MarkedBlock::create(globalData(), sizeClass.cellSize);
-    sizeClass.blockList.append(block);
-    sizeClass.nextBlock = block;
-    m_blocks.add(block);
+    if (m_freeMode == FreeOrShrink)
+        m_markedSpace->freeOrShrinkBlock(block);
+    else
+        m_markedSpace->freeBlock(block);
+}
 
-    return block;
+inline Free::ReturnType Free::returnValue()
+{
+    return m_blocks.head();
 }
 
-void MarkedSpace::freeBlocks(DoublyLinkedList<MarkedBlock>& blocks)
+struct VisitWeakSet : MarkedBlock::VoidFunctor {
+    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
+    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
+private:
+    HeapRootVisitor& m_heapRootVisitor;
+};
+
+struct ReapWeakSet : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
+};
+
+MarkedSpace::MarkedSpace(Heap* heap)
+    : m_heap(heap)
 {
-    MarkedBlock* next;
-    for (MarkedBlock* block = blocks.head(); block; block = next) {
-        next = block->next();
+    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
+        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
+        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
+    }
 
-        blocks.remove(block);
-        m_blocks.remove(block);
-        MarkedBlock::destroy(block);
+    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
+        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
+        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
     }
+
+    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
+    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
+    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
 }
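The rewritten constructor above seeds one MarkedAllocator per size class and per destructor kind: exact classes every preciseStep bytes up to preciseCutoff, coarser classes every impreciseStep bytes up to impreciseCutoff, and a catch-all largeAllocator (cellSize 0) per subspace for anything bigger. The lookup that maps an allocation size to its class lives in MarkedSpace.h, not in this diff; the following is an illustrative sketch of that rounding scheme, with stand-in constants and types rather than the shipping header:

    #include <cassert>
    #include <cstddef>

    // Stand-in constants; the real values come from MarkedSpace.h and MarkedBlock.h.
    static const size_t preciseStep = 16;       // one allocation atom
    static const size_t preciseCutoff = 80;     // exact classes for small, hot sizes
    static const size_t impreciseStep = 160;    // coarser spacing past the cutoff
    static const size_t impreciseCutoff = 2048; // beyond this, the large allocator

    struct Allocator { size_t cellSize; };

    static Allocator preciseAllocators[preciseCutoff / preciseStep];
    static Allocator impreciseAllocators[impreciseCutoff / impreciseStep];
    static Allocator largeAllocator; // cellSize 0: blocks sized per request

    // Round a byte count up to its size class, in the spirit of allocatorFor().
    Allocator& allocatorFor(size_t bytes)
    {
        assert(bytes);
        if (bytes <= preciseCutoff)
            return preciseAllocators[(bytes - 1) / preciseStep];
        if (bytes <= impreciseCutoff)
            return impreciseAllocators[(bytes - 1) / impreciseStep];
        return largeAllocator;
    }

Keeping every cell in a block the same size is what lets the sweeper treat a block as a flat array of cells, at the cost of some internal fragmentation from rounding up.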
 
-void* MarkedSpace::allocateFromSizeClass(SizeClass& sizeClass)
+MarkedSpace::~MarkedSpace()
 {
-    for (MarkedBlock*& block = sizeClass.nextBlock ; block; block = block->next()) {
-        if (void* result = block->allocate())
-            return result;
+    Free free(Free::FreeAll, this);
+    forEachBlock(free);
+}
 
-        m_waterMark += block->capacity();
-    }
+struct LastChanceToFinalize : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { block->lastChanceToFinalize(); }
+};
 
-    if (m_waterMark < m_highWaterMark)
-        return allocateBlock(sizeClass)->allocate();
+void MarkedSpace::lastChanceToFinalize()
+{
+    canonicalizeCellLivenessData();
+    forEachBlock<LastChanceToFinalize>();
+}
 
-    return 0;
+void MarkedSpace::sweep()
+{
+    m_heap->sweeper()->willFinishSweeping();
+    forEachBlock<Sweep>();
 }
 
-void MarkedSpace::shrink()
+void MarkedSpace::resetAllocators()
 {
-    // We record a temporary list of empties to avoid modifying m_blocks while iterating it.
-    DoublyLinkedList<MarkedBlock> empties;
-
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it) {
-        MarkedBlock* block = *it;
-        if (block->isEmpty()) {
-            SizeClass& sizeClass = sizeClassFor(block->cellSize());
-            sizeClass.blockList.remove(block);
-            sizeClass.nextBlock = sizeClass.blockList.head();
-            empties.append(block);
-        }
+    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+        allocatorFor(cellSize).reset();
+        normalDestructorAllocatorFor(cellSize).reset();
+        immortalStructureDestructorAllocatorFor(cellSize).reset();
     }
-
-    freeBlocks(empties);
-    ASSERT(empties.isEmpty());
+
+    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+        allocatorFor(cellSize).reset();
+        normalDestructorAllocatorFor(cellSize).reset();
+        immortalStructureDestructorAllocatorFor(cellSize).reset();
+    }
+
+    m_normalSpace.largeAllocator.reset();
+    m_normalDestructorSpace.largeAllocator.reset();
+    m_immortalStructureDestructorSpace.largeAllocator.reset();
 }
 
-void MarkedSpace::clearMarks()
+void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
 {
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
-        (*it)->clearMarks();
+    VisitWeakSet visitWeakSet(heapRootVisitor);
+    forEachBlock(visitWeakSet);
 }
 
-void MarkedSpace::sweep()
+void MarkedSpace::reapWeakSets()
 {
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
-        (*it)->sweep();
+    forEachBlock<ReapWeakSet>();
 }
 
-size_t MarkedSpace::objectCount() const
+void MarkedSpace::canonicalizeCellLivenessData()
 {
-    size_t result = 0;
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
-        result += (*it)->markCount();
-    return result;
+    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+        allocatorFor(cellSize).canonicalizeCellLivenessData();
+        normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
+        immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
+    }
+
+    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+        allocatorFor(cellSize).canonicalizeCellLivenessData();
+        normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
+        immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
+    }
+
+    m_normalSpace.largeAllocator.canonicalizeCellLivenessData();
+    m_normalDestructorSpace.largeAllocator.canonicalizeCellLivenessData();
+    m_immortalStructureDestructorSpace.largeAllocator.canonicalizeCellLivenessData();
 }
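Note the pattern running through the new code: the hand-rolled BlockIterator loops on the removed side give way to tiny functors (VisitWeakSet, ReapWeakSet, LastChanceToFinalize, Sweep) dispatched through forEachBlock, which is declared in MarkedSpace.h and so does not appear in this diff. A minimal sketch of the likely shape, with std::set standing in for the real block set:

    #include <set>

    struct MarkedBlock {
        void sweep() { /* free dead cells; omitted */ }
    };

    // Mirrors MarkedBlock::VoidFunctor: a functor with no interesting result.
    struct VoidFunctor {
        typedef void ReturnType;
        void returnValue() { }
    };

    struct Sweep : VoidFunctor {
        void operator()(MarkedBlock* block) { block->sweep(); }
    };

    class Space {
    public:
        // Apply a caller-supplied functor to every block, then hand back its result.
        template<typename Functor>
        typename Functor::ReturnType forEachBlock(Functor& functor)
        {
            for (std::set<MarkedBlock*>::iterator it = m_blocks.begin(); it != m_blocks.end(); ++it)
                functor(*it);
            return functor.returnValue();
        }

        // Overload for default-constructible functors, enabling forEachBlock<Sweep>().
        template<typename Functor>
        typename Functor::ReturnType forEachBlock()
        {
            Functor functor;
            return forEachBlock(functor);
        }

    private:
        std::set<MarkedBlock*> m_blocks;
    };

The no-argument overload is what makes call sites like forEachBlock<ReapWeakSet>() work; stateful functors such as Free are constructed by the caller and passed in by reference.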
 
-size_t MarkedSpace::size() const
+bool MarkedSpace::isPagedOut(double deadline)
 {
-    size_t result = 0;
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
-        result += (*it)->size();
-    return result;
+    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+        if (allocatorFor(cellSize).isPagedOut(deadline)
+            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
+            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
+            return true;
+    }
+
+    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+        if (allocatorFor(cellSize).isPagedOut(deadline)
+            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
+            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
+            return true;
+    }
+
+    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
+        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
+        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
+        return true;
+
+    return false;
 }
 
-size_t MarkedSpace::capacity() const
+void MarkedSpace::freeBlock(MarkedBlock* block)
 {
-    size_t result = 0;
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
-        result += (*it)->capacity();
-    return result;
+    block->allocator()->removeBlock(block);
+    m_blocks.remove(block);
+    if (block->capacity() == MarkedBlock::blockSize) {
+        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
+        return;
+    }
+    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
 }
 
-void MarkedSpace::reset()
+void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
 {
-    m_waterMark = 0;
+    if (!block->isEmpty()) {
+        block->shrink();
+        return;
+    }
 
-    for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
-        sizeClassFor(cellSize).reset();
+    freeBlock(block);
+}
 
-    for (size_t cellSize = impreciseStep; cellSize < impreciseCutoff; cellSize += impreciseStep)
-        sizeClassFor(cellSize).reset();
+struct Shrink : MarkedBlock::VoidFunctor {
+    void operator()(MarkedBlock* block) { block->shrink(); }
+};
 
-    BlockIterator end = m_blocks.end();
-    for (BlockIterator it = m_blocks.begin(); it != end; ++it)
-        (*it)->reset();
+void MarkedSpace::shrink()
+{
+    Free freeOrShrink(Free::FreeOrShrink, this);
+    forEachBlock(freeOrShrink);
 }
 
 } // namespace JSC
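A last point worth calling out: isPagedOut(deadline) is a heuristic. Each allocator walks its blocks, and if the walk cannot finish before the wall-clock deadline, the heap is presumed paged out so that callers can back off rather than thrash swap during a collection. A self-contained illustration of the idiom; the names, the clock, and the budget handling here are assumptions, not the MarkedAllocator implementation:

    #include <chrono>
    #include <cstddef>
    #include <vector>

    // Touch the first byte of every block until a wall-clock budget runs out.
    // Resident pages fault in almost instantly; if the walk keeps missing the
    // deadline, we assume the OS has paged the region out.
    bool regionIsPagedOut(const std::vector<char*>& blocks, double secondsBudget)
    {
        using Clock = std::chrono::steady_clock;
        const Clock::time_point deadline = Clock::now()
            + std::chrono::duration_cast<Clock::duration>(std::chrono::duration<double>(secondsBudget));
        volatile char sink = 0;
        for (size_t i = 0; i < blocks.size(); ++i) {
            sink = sink + *blocks[i]; // force the page to be referenced
            if (Clock::now() > deadline)
                return true; // out of time: treat the region as paged out
        }
        static_cast<void>(sink);
        return false;
    }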