X-Git-Url: https://git.saurik.com/apple/javascriptcore.git/blobdiff_plain/2d39b0e377c0896910ee49ae70082ba665faf986..refs/heads/master:/heap/MarkedBlock.cpp

diff --git a/heap/MarkedBlock.cpp b/heap/MarkedBlock.cpp
index f4d39fc..b9c3f9f 100644
--- a/heap/MarkedBlock.cpp
+++ b/heap/MarkedBlock.cpp
@@ -26,7 +26,6 @@
 #include "config.h"
 #include "MarkedBlock.h"
 
-#include "DelayedReleaseScope.h"
 #include "IncrementalSweeper.h"
 #include "JSCell.h"
 #include "JSDestructibleObject.h"
@@ -34,46 +33,53 @@
 
 namespace JSC {
 
-MarkedBlock* MarkedBlock::create(DeadBlock* block, MarkedAllocator* allocator, size_t cellSize, DestructorType destructorType)
+MarkedBlock* MarkedBlock::create(MarkedAllocator* allocator, size_t capacity, size_t cellSize, bool needsDestruction)
 {
-    ASSERT(reinterpret_cast<size_t>(block) == (reinterpret_cast<size_t>(block) & blockMask));
-    Region* region = block->region();
-    return new (NotNull, block) MarkedBlock(region, allocator, cellSize, destructorType);
+    return new (NotNull, fastAlignedMalloc(blockSize, capacity)) MarkedBlock(allocator, capacity, cellSize, needsDestruction);
 }
 
-MarkedBlock::MarkedBlock(Region* region, MarkedAllocator* allocator, size_t cellSize, DestructorType destructorType)
-    : HeapBlock<MarkedBlock>(region)
+void MarkedBlock::destroy(MarkedBlock* block)
+{
+    block->~MarkedBlock();
+    fastAlignedFree(block);
+}
+
+MarkedBlock::MarkedBlock(MarkedAllocator* allocator, size_t capacity, size_t cellSize, bool needsDestruction)
+    : DoublyLinkedListNode<MarkedBlock>()
     , m_atomsPerCell((cellSize + atomSize - 1) / atomSize)
-    , m_endAtom((allocator->cellSize() ? atomsPerBlock : region->blockSize() / atomSize) - m_atomsPerCell + 1)
-    , m_destructorType(destructorType)
+    , m_endAtom((allocator->cellSize() ? atomsPerBlock - m_atomsPerCell : firstAtom()) + 1)
+    , m_capacity(capacity)
+    , m_needsDestruction(needsDestruction)
     , m_allocator(allocator)
     , m_state(New) // All cells start out unmarked.
-    , m_weakSet(allocator->heap()->vm())
+    , m_weakSet(allocator->heap()->vm(), *this)
 {
     ASSERT(allocator);
     HEAP_LOG_BLOCK_STATE_TRANSITION(this);
 }
 
-template<MarkedBlock::DestructorType dtorType>
 inline void MarkedBlock::callDestructor(JSCell* cell)
 {
     // A previous eager sweep may already have run cell's destructor.
     if (cell->isZapped())
         return;
 
-    if (dtorType == MarkedBlock::Normal)
-        jsCast<JSDestructibleObject*>(cell)->classInfo()->methodTable.destroy(cell);
-    else
+    ASSERT(cell->structureID());
+    if (cell->inlineTypeFlags() & StructureIsImmortal)
         cell->structure(*vm())->classInfo()->methodTable.destroy(cell);
+    else
+        jsCast<JSDestructibleObject*>(cell)->classInfo()->methodTable.destroy(cell);
     cell->zap();
 }
 
-template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode, MarkedBlock::DestructorType dtorType>
+template<MarkedBlock::BlockState blockState, MarkedBlock::SweepMode sweepMode, bool callDestructors>
 MarkedBlock::FreeList MarkedBlock::specializedSweep()
 {
     ASSERT(blockState != Allocated && blockState != FreeListed);
-    ASSERT(!(dtorType == MarkedBlock::None && sweepMode == SweepOnly));
+    ASSERT(!(!callDestructors && sweepMode == SweepOnly));
+
+    SamplingRegion samplingRegion((!callDestructors && blockState != New) ? "Calling destructors" : "sweeping");
 
     // This produces a free list that is ordered in reverse through the block.
     // This is fine, since the allocation code makes no assumptions about the
     // order of the free list.
@@ -85,8 +91,8 @@ MarkedBlock::FreeList MarkedBlock::specializedSweep()
 
         JSCell* cell = reinterpret_cast_ptr<JSCell*>(&atoms()[i]);
 
-        if (dtorType != MarkedBlock::None && blockState != New)
-            callDestructor<dtorType>(cell);
+        if (callDestructors && blockState != New)
+            callDestructor(cell);
 
         if (sweepMode == SweepToFreeList) {
             FreeCell* freeCell = reinterpret_cast<FreeCell*>(cell);
@@ -99,7 +105,7 @@ MarkedBlock::FreeList MarkedBlock::specializedSweep()
     // We only want to discard the newlyAllocated bits if we're creating a FreeList,
     // otherwise we would lose information on what's currently alive.
     if (sweepMode == SweepToFreeList && m_newlyAllocated)
-        m_newlyAllocated.clear();
+        m_newlyAllocated = nullptr;
 
     m_state = ((sweepMode == SweepToFreeList) ? FreeListed : Marked);
     return FreeList(head, count * cellSize());
@@ -107,28 +113,25 @@ MarkedBlock::FreeList MarkedBlock::specializedSweep()
 
 MarkedBlock::FreeList MarkedBlock::sweep(SweepMode sweepMode)
 {
-    ASSERT(DelayedReleaseScope::isInEffectFor(heap()->m_objectSpace));
     HEAP_LOG_BLOCK_STATE_TRANSITION(this);
 
     m_weakSet.sweep();
 
-    if (sweepMode == SweepOnly && m_destructorType == MarkedBlock::None)
+    if (sweepMode == SweepOnly && !m_needsDestruction)
         return FreeList();
 
-    if (m_destructorType == MarkedBlock::ImmortalStructure)
-        return sweepHelper<MarkedBlock::ImmortalStructure>(sweepMode);
-    if (m_destructorType == MarkedBlock::Normal)
-        return sweepHelper<MarkedBlock::Normal>(sweepMode);
-    return sweepHelper<MarkedBlock::None>(sweepMode);
+    if (m_needsDestruction)
+        return sweepHelper<true>(sweepMode);
+    return sweepHelper<false>(sweepMode);
 }
 
-template<MarkedBlock::DestructorType dtorType>
+template<bool callDestructors>
 MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode)
 {
     switch (m_state) {
     case New:
         ASSERT(sweepMode == SweepToFreeList);
-        return specializedSweep<New, SweepToFreeList, dtorType>();
+        return specializedSweep<New, SweepToFreeList, callDestructors>();
     case FreeListed:
         // Happens when a block transitions to fully allocated.
         ASSERT(sweepMode == SweepToFreeList);
@@ -139,8 +142,8 @@ MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode)
         return FreeList();
     case Marked:
         return sweepMode == SweepToFreeList
-            ? specializedSweep<Marked, SweepToFreeList, dtorType>()
-            : specializedSweep<Marked, SweepOnly, dtorType>();
+            ? specializedSweep<Marked, SweepToFreeList, callDestructors>()
+            : specializedSweep<Marked, SweepOnly, callDestructors>();
     }
 
     RELEASE_ASSERT_NOT_REACHED();
@@ -154,10 +157,11 @@ public:
     {
     }
 
-    void operator()(JSCell* cell)
+    IterationStatus operator()(JSCell* cell)
     {
         ASSERT(MarkedBlock::blockFor(cell) == m_block);
         m_block->setNewlyAllocated(cell);
+        return IterationStatus::Continue;
     }
 
 private:
@@ -187,7 +191,7 @@ void MarkedBlock::stopAllocating(const FreeList& freeList)
     // way to tell what's live vs dead.
 
     ASSERT(!m_newlyAllocated);
-    m_newlyAllocated = adoptPtr(new WTF::Bitmap<atomsPerBlock>());
+    m_newlyAllocated = std::make_unique<WTF::Bitmap<atomsPerBlock>>();
 
     SetNewlyAllocatedFunctor functor(this);
     forEachCell(functor);
@@ -214,11 +218,6 @@ void MarkedBlock::clearMarks()
 #endif
 }
 
-void MarkedBlock::clearRememberedSet()
-{
-    m_rememberedSet.clearAll();
-}
-
 template <HeapOperation collectionType>
 void MarkedBlock::clearMarksWithCollectionType()
 {
@@ -228,9 +227,6 @@ void MarkedBlock::clearMarksWithCollectionType()
     ASSERT(m_state != New && m_state != FreeListed);
     if (collectionType == FullCollection) {
         m_marks.clearAll();
-#if ENABLE(GGC)
-        m_rememberedSet.clearAll();
-#endif
         // This will become true at the end of the mark phase. We set it now to
         // avoid an extra pass to do so later.
         m_state = Marked;