#include "config.h"
#include "MarkedSpace.h"
+#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
-#include "JSCell.h"
-#include "JSGlobalData.h"
#include "JSLock.h"
#include "JSObject.h"
-#include "ScopeChain.h"
+#include "JSCInlines.h"
namespace JSC {
class Structure;
-MarkedSpace::MarkedSpace(JSGlobalData* globalData)
- : m_waterMark(0)
- , m_highWaterMark(0)
- , m_globalData(globalData)
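+// forEachBlock functor that disposes of blocks: FreeAll frees every block it
+// visits, while FreeOrShrink frees only empty blocks and shrinks the rest.
+// ReturnType/returnValue() satisfy the forEachBlock functor protocol; note
+// that m_blocks is never appended to here, so returnValue() is always null.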
+class Free {
+public:
+ typedef MarkedBlock* ReturnType;
+
+ enum FreeMode { FreeOrShrink, FreeAll };
+
+ Free(FreeMode, MarkedSpace*);
+ void operator()(MarkedBlock*);
+ ReturnType returnValue();
+
+private:
+ FreeMode m_freeMode;
+ MarkedSpace* m_markedSpace;
+ DoublyLinkedList<MarkedBlock> m_blocks;
+};
+
+inline Free::Free(FreeMode freeMode, MarkedSpace* markedSpace)
+    : m_freeMode(freeMode)
+    , m_markedSpace(markedSpace)
{
- for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
- sizeClassFor(cellSize).cellSize = cellSize;
+}
- for (size_t cellSize = impreciseStep; cellSize < impreciseCutoff; cellSize += impreciseStep)
- sizeClassFor(cellSize).cellSize = cellSize;
+inline void Free::operator()(MarkedBlock* block)
+{
+ if (m_freeMode == FreeOrShrink)
+ m_markedSpace->freeOrShrinkBlock(block);
+ else
+ m_markedSpace->freeBlock(block);
}
-void MarkedSpace::destroy()
+inline Free::ReturnType Free::returnValue()
{
- clearMarks();
- shrink();
- ASSERT(!size());
+ return m_blocks.head();
}
-MarkedBlock* MarkedSpace::allocateBlock(SizeClass& sizeClass)
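+// Block functors that forward weak set visiting and reaping to each block.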
+struct VisitWeakSet : MarkedBlock::VoidFunctor {
+ VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
+ void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
+private:
+ HeapRootVisitor& m_heapRootVisitor;
+};
+
+struct ReapWeakSet : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->reapWeakSet(); }
+};
+
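+// Each size class is backed by two allocators: one for cells without
+// destructors and one for cells that need them. Cells too large for any
+// size class go through the large allocators, initialized with cell size 0.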
+MarkedSpace::MarkedSpace(Heap* heap)
+ : m_heap(heap)
+ , m_capacity(0)
+ , m_isIterating(false)
{
- MarkedBlock* block = MarkedBlock::create(globalData(), sizeClass.cellSize);
- sizeClass.blockList.append(block);
- sizeClass.nextBlock = block;
- m_blocks.add(block);
+ for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+ allocatorFor(cellSize).init(heap, this, cellSize, false);
+ destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
+ }
- return block;
+ for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+ allocatorFor(cellSize).init(heap, this, cellSize, false);
+ destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
+ }
+
+ m_normalSpace.largeAllocator.init(heap, this, 0, false);
+ m_destructorSpace.largeAllocator.init(heap, this, 0, true);
}
-void MarkedSpace::freeBlocks(DoublyLinkedList<MarkedBlock>& blocks)
+MarkedSpace::~MarkedSpace()
{
- MarkedBlock* next;
- for (MarkedBlock* block = blocks.head(); block; block = next) {
- next = block->next();
+ Free free(Free::FreeAll, this);
+ forEachBlock(free);
+ ASSERT(!m_blocks.set().size());
+}
+
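+// Gives each allocator a last chance to sweep and finalize its remaining
+// cells, e.g. when the VM is shutting down.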
+struct LastChanceToFinalize {
+ void operator()(MarkedAllocator& allocator) { allocator.lastChanceToFinalize(); }
+};
- blocks.remove(block);
- m_blocks.remove(block);
- MarkedBlock::destroy(block);
+void MarkedSpace::lastChanceToFinalize()
+{
+ stopAllocating();
+ forEachAllocator<LastChanceToFinalize>();
+}
+
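+// Eagerly sweeps the whole space, first telling the incremental sweeper that
+// sweeping is being finished on its behalf.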
+void MarkedSpace::sweep()
+{
+ m_heap->sweeper()->willFinishSweeping();
+ forEachBlock<Sweep>();
+}
+
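+// Debugging variant of sweep(): the ZombifySweep functor turns dead cells
+// into zombies so stale references to them can be caught.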
+void MarkedSpace::zombifySweep()
+{
+ if (Options::logGC())
+ dataLog("Zombifying sweep...");
+ m_heap->sweeper()->willFinishSweeping();
+ forEachBlock<ZombifySweep>();
+}
+
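+// Resets every allocator's allocation state and, under generational GC,
+// forgets which blocks received new objects during the last cycle.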
+void MarkedSpace::resetAllocators()
+{
+ for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+ allocatorFor(cellSize).reset();
+ destructorAllocatorFor(cellSize).reset();
}
+
+ for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+ allocatorFor(cellSize).reset();
+ destructorAllocatorFor(cellSize).reset();
+ }
+
+ m_normalSpace.largeAllocator.reset();
+ m_destructorSpace.largeAllocator.reset();
+
+#if ENABLE(GGC)
+ m_blocksWithNewObjects.clear();
+#endif
}
-void* MarkedSpace::allocateFromSizeClass(SizeClass& sizeClass)
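+// An Eden collection only needs to visit the weak sets of blocks that
+// received new objects; a full collection visits every block.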
+void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
{
- for (MarkedBlock*& block = sizeClass.nextBlock ; block; block = block->next()) {
- if (void* result = block->allocate())
- return result;
+ VisitWeakSet visitWeakSet(heapRootVisitor);
+ if (m_heap->operationInProgress() == EdenCollection) {
+ for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+ visitWeakSet(m_blocksWithNewObjects[i]);
+    } else
+ forEachBlock(visitWeakSet);
+}
- m_waterMark += block->capacity();
+void MarkedSpace::reapWeakSets()
+{
+ if (m_heap->operationInProgress() == EdenCollection) {
+ for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+ m_blocksWithNewObjects[i]->reapWeakSet();
+    } else
+ forEachBlock<ReapWeakSet>();
+}
+
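+// Applies the functor to every allocator: the precise and imprecise size
+// classes of both the normal and destructor spaces, plus the large allocators.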
+template <typename Functor>
+void MarkedSpace::forEachAllocator()
+{
+ Functor functor;
+ forEachAllocator(functor);
+}
+
+template <typename Functor>
+void MarkedSpace::forEachAllocator(Functor& functor)
+{
+ for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+ functor(allocatorFor(cellSize));
+ functor(destructorAllocatorFor(cellSize));
}
- if (m_waterMark < m_highWaterMark)
- return allocateBlock(sizeClass)->allocate();
+ for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+ functor(allocatorFor(cellSize));
+ functor(destructorAllocatorFor(cellSize));
+ }
- return 0;
+ functor(m_normalSpace.largeAllocator);
+ functor(m_destructorSpace.largeAllocator);
}
-void MarkedSpace::shrink()
+struct StopAllocatingFunctor {
+ void operator()(MarkedAllocator& allocator) { allocator.stopAllocating(); }
+};
+
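+// Allocation is stopped while the heap is iterated (see willStartIterating())
+// and resumed afterwards.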
+void MarkedSpace::stopAllocating()
{
- // We record a temporary list of empties to avoid modifying m_blocks while iterating it.
- DoublyLinkedList<MarkedBlock> empties;
-
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it) {
- MarkedBlock* block = *it;
- if (block->isEmpty()) {
- SizeClass& sizeClass = sizeClassFor(block->cellSize());
- sizeClass.blockList.remove(block);
- sizeClass.nextBlock = sizeClass.blockList.head();
- empties.append(block);
- }
+ ASSERT(!isIterating());
+ forEachAllocator<StopAllocatingFunctor>();
+}
+
+struct ResumeAllocatingFunctor {
+ void operator()(MarkedAllocator& allocator) { allocator.resumeAllocating(); }
+};
+
+void MarkedSpace::resumeAllocating()
+{
+ ASSERT(isIterating());
+ forEachAllocator<ResumeAllocatingFunctor>();
+}
+
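+// Probes each allocator in turn and reports whether probing overran the
+// deadline, which suggests much of the heap is currently paged out.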
+bool MarkedSpace::isPagedOut(double deadline)
+{
+ for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
+ if (allocatorFor(cellSize).isPagedOut(deadline)
+ || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+ return true;
}
-
- freeBlocks(empties);
- ASSERT(empties.isEmpty());
+
+ for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
+ if (allocatorFor(cellSize).isPagedOut(deadline)
+ || destructorAllocatorFor(cellSize).isPagedOut(deadline))
+ return true;
+ }
+
+ if (m_normalSpace.largeAllocator.isPagedOut(deadline)
+ || m_destructorSpace.largeAllocator.isPagedOut(deadline))
+ return true;
+
+ return false;
}
-void MarkedSpace::clearMarks()
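+// Detaches the block from its allocator and from the block set, keeping
+// m_capacity in sync, then destroys it.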
+void MarkedSpace::freeBlock(MarkedBlock* block)
{
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it)
- (*it)->clearMarks();
+ block->allocator()->removeBlock(block);
+ m_capacity -= block->capacity();
+ m_blocks.remove(block);
+ MarkedBlock::destroy(block);
}
-void MarkedSpace::sweep()
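+// Blocks still holding live cells are shrunk in place; empty ones are freed.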
+void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it)
- (*it)->sweep();
+ if (!block->isEmpty()) {
+ block->shrink();
+ return;
+ }
+
+ freeBlock(block);
}
-size_t MarkedSpace::objectCount() const
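+// shrink() releases unused memory: empty blocks are freed outright and the
+// rest are shrunk in place.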
+struct Shrink : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->shrink(); }
+};
+
+void MarkedSpace::shrink()
{
- size_t result = 0;
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it)
- result += (*it)->markCount();
- return result;
+ Free freeOrShrink(Free::FreeOrShrink, this);
+ forEachBlock(freeOrShrink);
}
-size_t MarkedSpace::size() const
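+// Helper for clearNewlyAllocated(); the block handed in (an allocator's last
+// active block) may be null, hence the guard.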
+static void clearNewlyAllocatedInBlock(MarkedBlock* block)
{
- size_t result = 0;
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it)
- result += (*it)->size();
- return result;
+ if (!block)
+ return;
+ block->clearNewlyAllocated();
}
-size_t MarkedSpace::capacity() const
+struct ClearNewlyAllocated : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { block->clearNewlyAllocated(); }
+};
+
+#ifndef NDEBUG
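+// clearNewlyAllocated() reports whether a newly-allocated bitmap was still
+// present; after the passes in MarkedSpace::clearNewlyAllocated(), none
+// should be.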
+struct VerifyNewlyAllocated : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block) { ASSERT(!block->clearNewlyAllocated()); }
+};
+#endif
+
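+// Clears every outstanding newly-allocated bitmap: first the last active
+// block of each size-classed allocator, then every block in the large
+// allocators.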
+void MarkedSpace::clearNewlyAllocated()
{
- size_t result = 0;
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it)
- result += (*it)->capacity();
- return result;
+ for (size_t i = 0; i < preciseCount; ++i) {
+ clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_destructorSpace.preciseAllocators[i].takeLastActiveBlock());
+ }
+
+ for (size_t i = 0; i < impreciseCount; ++i) {
+ clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_destructorSpace.impreciseAllocators[i].takeLastActiveBlock());
+ }
+
+    // We have to iterate all of the blocks in the large allocators because they
+    // are canonicalized as they are used up (see MarkedAllocator::tryAllocateHelper),
+    // which creates the m_newlyAllocated bitmap.
+ ClearNewlyAllocated functor;
+ m_normalSpace.largeAllocator.forEachBlock(functor);
+ m_destructorSpace.largeAllocator.forEachBlock(functor);
+
+#ifndef NDEBUG
+ VerifyNewlyAllocated verifyFunctor;
+ forEachBlock(verifyFunctor);
+#endif
}
-void MarkedSpace::reset()
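+// Debug-only check run after clearMarks(): every block must have ended up in
+// either the Marked or the Retired state.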
+#ifndef NDEBUG
+struct VerifyMarkedOrRetired : MarkedBlock::VoidFunctor {
+ void operator()(MarkedBlock* block)
+ {
+ switch (block->m_state) {
+ case MarkedBlock::Marked:
+ case MarkedBlock::Retired:
+ return;
+ default:
+ RELEASE_ASSERT_NOT_REACHED();
+ }
+ }
+};
+#endif
+
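+// An Eden collection only clears marks on blocks with new objects; a full
+// collection clears marks on every block.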
+void MarkedSpace::clearMarks()
{
- m_waterMark = 0;
+ if (m_heap->operationInProgress() == EdenCollection) {
+ for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
+ m_blocksWithNewObjects[i]->clearMarks();
+ } else
+ forEachBlock<ClearMarks>();
- for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
- sizeClassFor(cellSize).reset();
+#ifndef NDEBUG
+ VerifyMarkedOrRetired verifyFunctor;
+ forEachBlock(verifyFunctor);
+#endif
+}
- for (size_t cellSize = impreciseStep; cellSize < impreciseCutoff; cellSize += impreciseStep)
- sizeClassFor(cellSize).reset();
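+// Iteration requires a stable heap, so allocation is paused for its duration;
+// didFinishIterating() resumes it.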
+void MarkedSpace::willStartIterating()
+{
+ ASSERT(!isIterating());
+ stopAllocating();
+ m_isIterating = true;
+}
- BlockIterator end = m_blocks.end();
- for (BlockIterator it = m_blocks.begin(); it != end; ++it)
- (*it)->reset();
+void MarkedSpace::didFinishIterating()
+{
+ ASSERT(isIterating());
+ resumeAllocating();
+ m_isIterating = false;
}
} // namespace JSC