/*
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *  Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */
#include "config.h"
#include "MarkedSpace.h"

#include "DelayedReleaseScope.h"
#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
#include "JSCInlines.h"

namespace JSC {
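
// Free is a functor for forEachBlock: in FreeAll mode (used by the
// destructor) it frees every block outright; in FreeOrShrink mode (used by
// shrink()) it frees empty blocks and shrinks the rest.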
class Free {
public:
    typedef MarkedBlock* ReturnType;

    enum FreeMode { FreeOrShrink, FreeAll };

    Free(FreeMode, MarkedSpace*);
    void operator()(MarkedBlock*);
    ReturnType returnValue();

private:
    FreeMode m_freeMode;
    MarkedSpace* m_markedSpace;
    DoublyLinkedList<MarkedBlock> m_blocks;
};

inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
    : m_freeMode(freeMode)
    , m_markedSpace(newSpace)
{
}

inline void Free::operator()(MarkedBlock* block)
{
    if (m_freeMode == FreeOrShrink)
        m_markedSpace->freeOrShrinkBlock(block);
    else
        m_markedSpace->freeBlock(block);
}

inline Free::ReturnType Free::returnValue()
{
    return m_blocks.head();
}

struct VisitWeakSet : MarkedBlock::VoidFunctor {
    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }

private:
    HeapRootVisitor& m_heapRootVisitor;
};

struct ReapWeakSet : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
};
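
// The constructor initializes one allocator per size class in each of the
// three spaces (no destructor, normal destructor, immortal structure), first
// for the precise size classes, then the imprecise ones, and finally the
// variable-size large allocators, which use a cell size of 0.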
MarkedSpace::MarkedSpace(Heap* heap)
    : m_heap(heap)
    , m_capacity(0)
    , m_isIterating(false)
    , m_currentDelayedReleaseScope(nullptr)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
}

MarkedSpace::~MarkedSpace()
{
    Free free(Free::FreeAll, this);
    forEachBlock(free);
    ASSERT(!m_blocks.set().size());
}

struct LastChanceToFinalize {
    void operator()(MarkedAllocator& allocator) { allocator.lastChanceToFinalize(); }
};

void MarkedSpace::lastChanceToFinalize()
{
    DelayedReleaseScope delayedReleaseScope(*this);
    stopAllocating();
    forEachAllocator<LastChanceToFinalize>();
}

void MarkedSpace::sweep()
{
    if (Options::logGC())
        dataLog("Eagerly sweeping...");
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<Sweep>();
}

void MarkedSpace::zombifySweep()
{
    if (Options::logGC())
        dataLog("Zombifying sweep...");
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<ZombifySweep>();
}

void MarkedSpace::resetAllocators()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    m_normalSpace.largeAllocator.reset();
    m_normalDestructorSpace.largeAllocator.reset();
    m_immortalStructureDestructorSpace.largeAllocator.reset();

    m_blocksWithNewObjects.clear();
}
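
// Eden collections only need to visit the weak sets of blocks that received
// new objects since the last collection; full collections walk every block.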
void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
{
    VisitWeakSet visitWeakSet(heapRootVisitor);
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            visitWeakSet(m_blocksWithNewObjects[i]);
    } else
        forEachBlock(visitWeakSet);
}

void MarkedSpace::reapWeakSets()
{
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            m_blocksWithNewObjects[i]->reapWeakSet();
    } else
        forEachBlock<ReapWeakSet>();
}

template <typename Functor>
void MarkedSpace::forEachAllocator()
{
    Functor functor;
    forEachAllocator(functor);
}

template <typename Functor>
void MarkedSpace::forEachAllocator(Functor& functor)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        functor(allocatorFor(cellSize));
        functor(normalDestructorAllocatorFor(cellSize));
        functor(immortalStructureDestructorAllocatorFor(cellSize));
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        functor(allocatorFor(cellSize));
        functor(normalDestructorAllocatorFor(cellSize));
        functor(immortalStructureDestructorAllocatorFor(cellSize));
    }

    functor(m_normalSpace.largeAllocator);
    functor(m_normalDestructorSpace.largeAllocator);
    functor(m_immortalStructureDestructorSpace.largeAllocator);
}

struct StopAllocatingFunctor {
    void operator()(MarkedAllocator& allocator) { allocator.stopAllocating(); }
};

void MarkedSpace::stopAllocating()
{
    ASSERT(!isIterating());
    forEachAllocator<StopAllocatingFunctor>();
}

struct ResumeAllocatingFunctor {
    void operator()(MarkedAllocator& allocator) { allocator.resumeAllocating(); }
};

void MarkedSpace::resumeAllocating()
{
    ASSERT(isIterating());
    forEachAllocator<ResumeAllocatingFunctor>();
}
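
// isPagedOut probes every allocator against the given deadline and reports
// true as soon as one of them appears to be paged out, signaling that
// touching the whole heap right now would be too expensive.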
bool MarkedSpace::isPagedOut(double deadline)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
        return true;

    return false;
}
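
// freeBlock returns a standard-size block to the shared BlockAllocator pool;
// oversize blocks were created with a custom size and must be destroyed
// through deallocateCustomSize instead.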
void MarkedSpace::freeBlock(MarkedBlock* block)
{
    block->allocator()->removeBlock(block);
    m_capacity -= block->capacity();
    m_blocks.remove(block);
    if (block->capacity() == MarkedBlock::blockSize) {
        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
        return;
    }
    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
}

void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
    if (!block->isEmpty()) {
        block->shrink();
        return;
    }

    freeBlock(block);
}

struct Shrink : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->shrink(); }
};

void MarkedSpace::shrink()
{
    Free freeOrShrink(Free::FreeOrShrink, this);
    forEachBlock(freeOrShrink);
}

static void clearNewlyAllocatedInBlock(MarkedBlock* block)
{
    if (!block)
        return;
    block->clearNewlyAllocated();
}

struct ClearNewlyAllocated : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->clearNewlyAllocated(); }
};

#ifndef NDEBUG
struct VerifyNewlyAllocated : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { ASSERT(!block->clearNewlyAllocated()); }
};
#endif

void MarkedSpace::clearNewlyAllocated()
{
    for (size_t i = 0; i < preciseCount; ++i) {
        clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_normalDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
    }

    for (size_t i = 0; i < impreciseCount; ++i) {
        clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_normalDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
    }

    // We have to iterate all of the blocks in the large allocators because they are
    // canonicalized as they are used up (see MarkedAllocator::tryAllocateHelper),
    // which creates the m_newlyAllocated bitmap.
    ClearNewlyAllocated functor;
    m_normalSpace.largeAllocator.forEachBlock(functor);
    m_normalDestructorSpace.largeAllocator.forEachBlock(functor);
    m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor);

#ifndef NDEBUG
    VerifyNewlyAllocated verifyFunctor;
    forEachBlock(verifyFunctor);
#endif
}
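
// Debug-only sanity check: after clearMarks, every block must be in the
// Marked or Retired state.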
#ifndef NDEBUG
struct VerifyMarkedOrRetired : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block)
    {
        switch (block->m_state) {
        case MarkedBlock::Marked:
        case MarkedBlock::Retired:
            return;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
};
#endif

void MarkedSpace::clearMarks()
{
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            m_blocksWithNewObjects[i]->clearMarks();
    } else
        forEachBlock<ClearMarks>();

#ifndef NDEBUG
    VerifyMarkedOrRetired verifyFunctor;
    forEachBlock(verifyFunctor);
#endif
}
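
// Iteration brackets: allocation is stopped before clients walk the heap so
// blocks cannot change underneath them, and resumed once iteration finishes.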
void MarkedSpace::willStartIterating()
{
    ASSERT(!isIterating());
    stopAllocating();
    m_isIterating = true;
}

void MarkedSpace::didFinishIterating()
{
    ASSERT(isIterating());
    DelayedReleaseScope scope(*this);
    resumeAllocating();
    m_isIterating = false;
}

} // namespace JSC