#include "config.h"
#include "MarkedSpace.h"
-#include "DelayedReleaseScope.h"
#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
#include "JSLock.h"
: m_heap(heap)
, m_capacity(0)
, m_isIterating(false)
- , m_currentDelayedReleaseScope(nullptr)
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
- allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
- normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
- immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
+ allocatorFor(cellSize).init(heap, this, cellSize, false);
+ destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
- allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
- normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
- immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
+ allocatorFor(cellSize).init(heap, this, cellSize, false);
+ destructorAllocatorFor(cellSize).init(heap, this, cellSize, true);
}
- m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
- m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
- m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
+ m_normalSpace.largeAllocator.init(heap, this, 0, false);
+ m_destructorSpace.largeAllocator.init(heap, this, 0, true);
}
MarkedSpace::~MarkedSpace()
// Final teardown pass for the marked space: stops all allocation and runs
// each allocator's last-chance finalization so destructors/finalizers fire
// before the heap goes away.
void MarkedSpace::lastChanceToFinalize()
{
// NOTE(review, diff context): the patch removes the DelayedReleaseScope guard
// here — presumably delayed release is handled elsewhere after this change;
// confirm against the DelayedReleaseScope removal in the rest of the patch.
- DelayedReleaseScope delayedReleaseScope(*this);
stopAllocating();
forEachAllocator<LastChanceToFinalize>();
}
// Eagerly sweeps every block in the space. Notifies the incremental sweeper
// first so it does not redundantly re-sweep blocks we handle here.
void MarkedSpace::sweep()
{
// NOTE(review, diff context): the patch drops this GC logging; behavior is
// otherwise unchanged.
- if (Options::logGC())
- dataLog("Eagerly sweeping...");
m_heap->sweeper()->willFinishSweeping();
forEachBlock<Sweep>();
}
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
allocatorFor(cellSize).reset();
- normalDestructorAllocatorFor(cellSize).reset();
- immortalStructureDestructorAllocatorFor(cellSize).reset();
+ destructorAllocatorFor(cellSize).reset();
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
allocatorFor(cellSize).reset();
- normalDestructorAllocatorFor(cellSize).reset();
- immortalStructureDestructorAllocatorFor(cellSize).reset();
+ destructorAllocatorFor(cellSize).reset();
}
m_normalSpace.largeAllocator.reset();
- m_normalDestructorSpace.largeAllocator.reset();
- m_immortalStructureDestructorSpace.largeAllocator.reset();
+ m_destructorSpace.largeAllocator.reset();
#if ENABLE(GGC)
m_blocksWithNewObjects.clear();
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
functor(allocatorFor(cellSize));
- functor(normalDestructorAllocatorFor(cellSize));
- functor(immortalStructureDestructorAllocatorFor(cellSize));
+ functor(destructorAllocatorFor(cellSize));
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
functor(allocatorFor(cellSize));
- functor(normalDestructorAllocatorFor(cellSize));
- functor(immortalStructureDestructorAllocatorFor(cellSize));
+ functor(destructorAllocatorFor(cellSize));
}
functor(m_normalSpace.largeAllocator);
- functor(m_normalDestructorSpace.largeAllocator);
- functor(m_immortalStructureDestructorSpace.largeAllocator);
+ functor(m_destructorSpace.largeAllocator);
}
struct StopAllocatingFunctor {
{
for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
if (allocatorFor(cellSize).isPagedOut(deadline)
- || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
- || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
+ || destructorAllocatorFor(cellSize).isPagedOut(deadline))
return true;
}
for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
if (allocatorFor(cellSize).isPagedOut(deadline)
- || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
- || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
+ || destructorAllocatorFor(cellSize).isPagedOut(deadline))
return true;
}
if (m_normalSpace.largeAllocator.isPagedOut(deadline)
- || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
- || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
+ || m_destructorSpace.largeAllocator.isPagedOut(deadline))
return true;
return false;
block->allocator()->removeBlock(block);
m_capacity -= block->capacity();
m_blocks.remove(block);
- if (block->capacity() == MarkedBlock::blockSize) {
- m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
- return;
- }
- m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
+ MarkedBlock::destroy(block);
}
void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
for (size_t i = 0; i < preciseCount; ++i) {
clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
- clearNewlyAllocatedInBlock(m_normalDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
- clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_destructorSpace.preciseAllocators[i].takeLastActiveBlock());
}
for (size_t i = 0; i < impreciseCount; ++i) {
clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
- clearNewlyAllocatedInBlock(m_normalDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
- clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
+ clearNewlyAllocatedInBlock(m_destructorSpace.impreciseAllocators[i].takeLastActiveBlock());
}
// We have to iterate all of the blocks in the large allocators because they are
// which creates the m_newlyAllocated bitmap.
ClearNewlyAllocated functor;
m_normalSpace.largeAllocator.forEachBlock(functor);
- m_normalDestructorSpace.largeAllocator.forEachBlock(functor);
- m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor);
+ m_destructorSpace.largeAllocator.forEachBlock(functor);
#ifndef NDEBUG
VerifyNewlyAllocated verifyFunctor;
// Called when an iteration over the space completes: resumes allocation
// (which was paused for the duration of iteration) and clears the
// is-iterating flag.
void MarkedSpace::didFinishIterating()
{
ASSERT(isIterating());
// NOTE(review, diff context): DelayedReleaseScope guard removed by this
// patch — resumeAllocating() now runs without it.
- DelayedReleaseScope scope(*this);
resumeAllocating();
m_isIterating = false;
}