/*
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *  Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"

namespace JSC {

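// Free is a forEachBlock functor that tears blocks down: FreeAll frees every
// block unconditionally, while FreeOrShrink frees empty blocks and shrinks
// non-empty ones in place.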
class Free {
public:
    typedef MarkedBlock* ReturnType;

    enum FreeMode { FreeOrShrink, FreeAll };

    Free(FreeMode, MarkedSpace*);
    void operator()(MarkedBlock*);
    ReturnType returnValue();

private:
    FreeMode m_freeMode;
    MarkedSpace* m_markedSpace;
    DoublyLinkedList<MarkedBlock> m_blocks;
};

inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
    : m_freeMode(freeMode)
    , m_markedSpace(newSpace)
{
}

inline void Free::operator()(MarkedBlock* block)
{
    if (m_freeMode == FreeOrShrink)
        m_markedSpace->freeOrShrinkBlock(block);
    else
        m_markedSpace->freeBlock(block);
}

inline Free::ReturnType Free::returnValue()
{
    return m_blocks.head();
}

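// Functors with MarkedBlock::VoidFunctor's interface are driven over the whole
// space by forEachBlock. A minimal sketch of that driver, assuming it simply
// walks m_blocks and returns functor.returnValue() (the real template lives in
// MarkedSpace.h; this body is illustrative, not authoritative):
//
//     template<typename Functor>
//     typename Functor::ReturnType MarkedSpace::forEachBlock(Functor& functor)
//     {
//         MarkedBlock* next;
//         for (MarkedBlock* block = m_blocks.head(); block; block = next) {
//             next = block->next(); // fetch next first: functor may remove block
//             functor(block);
//         }
//         return functor.returnValue();
//     }
//
// VisitWeakSet and ReapWeakSet below are two such functors: the first visits
// each block's weak set with the given HeapRootVisitor, the second reaps the
// dead entries in each block's weak set.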
struct VisitWeakSet : MarkedBlock::VoidFunctor {
    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }

private:
    HeapRootVisitor& m_heapRootVisitor;
};

struct ReapWeakSet : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
};

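// MarkedSpace maintains one MarkedAllocator per size class, in three flavors:
// no destructor (None), normal destructor (Normal), and immortal-structure
// destructor (ImmortalStructure). Small "precise" size classes advance in
// preciseStep increments up to preciseCutoff, coarser "imprecise" classes in
// impreciseStep increments up to impreciseCutoff, and each flavor's
// largeAllocator (initialized with cell size 0) takes everything larger.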
MarkedSpace::MarkedSpace(Heap* heap)
    : m_heap(heap)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
}

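// Destruction frees every block unconditionally.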
MarkedSpace::~MarkedSpace()
{
    Free free(Free::FreeAll, this);
    forEachBlock(free);
}

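// Called when the heap is shutting down: make liveness data canonical so the
// walk below sees a consistent view, then give every block a last chance to
// finalize its remaining cells.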
struct LastChanceToFinalize : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->lastChanceToFinalize(); }
};

void MarkedSpace::lastChanceToFinalize()
{
    canonicalizeCellLivenessData();
    forEachBlock<LastChanceToFinalize>();
}

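// An eager sweep of the whole space. The incremental sweeper is notified
// first so eager and incremental sweeping do not overlap.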
void MarkedSpace::sweep()
{
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<Sweep>();
}

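// Reset every allocator, across all size classes and all three destructor
// flavors, plus the large allocators.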
void MarkedSpace::resetAllocators()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    m_normalSpace.largeAllocator.reset();
    m_normalDestructorSpace.largeAllocator.reset();
    m_immortalStructureDestructorSpace.largeAllocator.reset();
}

void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
{
    VisitWeakSet visitWeakSet(heapRootVisitor);
    forEachBlock(visitWeakSet);
}

void MarkedSpace::reapWeakSets()
{
    forEachBlock<ReapWeakSet>();
}

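// Canonicalizing asks every allocator to retire its in-flight allocation
// state so per-cell liveness data can be read coherently across the space.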
void MarkedSpace::canonicalizeCellLivenessData()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).canonicalizeCellLivenessData();
        normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
        immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).canonicalizeCellLivenessData();
        normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
        immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
    }

    m_normalSpace.largeAllocator.canonicalizeCellLivenessData();
    m_normalDestructorSpace.largeAllocator.canonicalizeCellLivenessData();
    m_immortalStructureDestructorSpace.largeAllocator.canonicalizeCellLivenessData();
}

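// Probes every allocator, under the given time deadline, and reports true as
// soon as any of them appears to have had its memory paged out.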
bool MarkedSpace::isPagedOut(double deadline)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
        return true;

    return false;
}

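// freeBlock detaches a block from its allocator and from the space's block
// list, then returns its memory to the heap's block allocator: standard-sized
// blocks go through deallocate(), oversized ones through
// deallocateCustomSize().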
void MarkedSpace::freeBlock(MarkedBlock* block)
{
    block->allocator()->removeBlock(block);
    m_blocks.remove(block);
    if (block->capacity() == MarkedBlock::blockSize) {
        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
        return;
    }
    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
}

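// Only an empty block can be freed; a block that still holds live cells is
// shrunk in place instead.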
void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
    if (!block->isEmpty()) {
        block->shrink();
        return;
    }

    freeBlock(block);
}

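// Shrink is the per-block shrink functor; unlike Free in FreeOrShrink mode it
// never frees a block outright.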
struct Shrink : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->shrink(); }
};

void MarkedSpace::shrink()
{
    Free freeOrShrink(Free::FreeOrShrink, this);
    forEachBlock(freeOrShrink);
}

} // namespace JSC