+    }
+
+    // We only want to discard the newlyAllocated bits if we're creating a FreeList;
+    // otherwise we would lose information about what's currently alive.
+    if (sweepMode == SweepToFreeList && m_newlyAllocated)
+        m_newlyAllocated = nullptr;
+
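+    // Transition the block to its post-sweep state and return the resulting free list.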
+    m_state = (sweepMode == SweepToFreeList) ? FreeListed : Marked;
+    return FreeList(head, count * cellSize());
+}
+
+MarkedBlock::FreeList MarkedBlock::sweep(SweepMode sweepMode)
+{
+    HEAP_LOG_BLOCK_STATE_TRANSITION(this);
+
+    m_weakSet.sweep();
+
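+    // A sweep that doesn't build a free list only has work to do when cells
+    // need their destructors run.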
+    if (sweepMode == SweepOnly && !m_needsDestruction)
+        return FreeList();
+
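+    // Dispatch to a sweep helper specialized at compile time on whether
+    // destructors must be called.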
+    if (m_needsDestruction)
+        return sweepHelper<true>(sweepMode);
+    return sweepHelper<false>(sweepMode);
+}
+
+template<bool callDestructors>
+MarkedBlock::FreeList MarkedBlock::sweepHelper(SweepMode sweepMode)
+{
+    switch (m_state) {
+    case New:
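+        // A brand-new block has never held live cells, so the only meaningful
+        // sweep is one that builds a free list.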
+        ASSERT(sweepMode == SweepToFreeList);
+        return specializedSweep<New, SweepToFreeList, callDestructors>();
+    case FreeListed:
+        // Happens when a block transitions to fully allocated.
+        ASSERT(sweepMode == SweepToFreeList);
+        return FreeList();
+    case Retired:
+    case Allocated:
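+        // Sweeping is never requested while a block is Retired or Allocated.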
+        RELEASE_ASSERT_NOT_REACHED();
+        return FreeList();
+    case Marked:
+        return sweepMode == SweepToFreeList
+            ? specializedSweep<Marked, SweepToFreeList, callDestructors>()
+            : specializedSweep<Marked, SweepOnly, callDestructors>();
+    }
+
+    RELEASE_ASSERT_NOT_REACHED();
+    return FreeList();
+}
+
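+// Sets the newlyAllocated bit for each cell it visits. stopAllocating() below
+// runs it over every cell in a block and then clears the bits for cells that
+// are still on the free list.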
+class SetNewlyAllocatedFunctor : public MarkedBlock::VoidFunctor {
+public:
+    SetNewlyAllocatedFunctor(MarkedBlock* block)
+        : m_block(block)
+    {
+    }
+
+    IterationStatus operator()(JSCell* cell)
+    {
+        ASSERT(MarkedBlock::blockFor(cell) == m_block);
+        m_block->setNewlyAllocated(cell);
+        return IterationStatus::Continue;
+    }
+
+private:
+    MarkedBlock* m_block;
+};
+
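+// Called when the allocator stops allocating out of this block's free list.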
+void MarkedBlock::stopAllocating(const FreeList& freeList)
+{
+    HEAP_LOG_BLOCK_STATE_TRANSITION(this);
+    FreeCell* head = freeList.head;
+
+    if (m_state == Marked) {
+        // If the block is in the Marked state then we know that:
+        // 1) It was not used for allocation during the previous allocation cycle.
+        // 2) It may have dead objects, which we only know to be dead because
+        //    their mark bits are unset.
+        // Hence if the block is Marked we need to leave it Marked.
+
+        ASSERT(!head);
+        return;
+    }
+
+    ASSERT(m_state == FreeListed);
+
+    // Roll back to a coherent state for Heap introspection. Cells newly
+    // allocated from our free list are not currently marked, so we need another
+    // way to tell what's live vs. dead.
+
+    ASSERT(!m_newlyAllocated);
+    m_newlyAllocated = std::make_unique<WTF::Bitmap<atomsPerBlock>>();
+
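+    // Conservatively mark every cell in the block as newly allocated.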
+    SetNewlyAllocatedFunctor functor(this);
+    forEachCell(functor);
+
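+    // Cells still on the free list were never handed out: zap them and clear
+    // their newlyAllocated bits so they read as dead.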
+    FreeCell* next;
+    for (FreeCell* current = head; current; current = next) {
+        next = current->next;
+        reinterpret_cast<JSCell*>(current)->zap();
+        clearNewlyAllocated(current);
+    }
+
+    m_state = Marked;
+}
+
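+// Dispatch to the clearMarks specialization for the collection type in progress.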
+void MarkedBlock::clearMarks()
+{
+#if ENABLE(GGC)
+    if (heap()->operationInProgress() == JSC::EdenCollection)
+        this->clearMarksWithCollectionType<EdenCollection>();
+    else
+        this->clearMarksWithCollectionType<FullCollection>();