/*
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 * Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "DelayedReleaseScope.h"
#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
#include "JSLock.h"
#include "JSObject.h"
#include "JSCInlines.h"

namespace JSC {

class Structure;

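// Block functor handed to forEachBlock(): FreeAll returns every block to the
// heap's block allocator, FreeOrShrink frees only empty blocks and shrinks the rest.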
class Free {
public:
    typedef MarkedBlock* ReturnType;

    enum FreeMode { FreeOrShrink, FreeAll };

    Free(FreeMode, MarkedSpace*);
    void operator()(MarkedBlock*);
    ReturnType returnValue();

private:
    FreeMode m_freeMode;
    MarkedSpace* m_markedSpace;
    DoublyLinkedList<MarkedBlock> m_blocks;
};

inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
    : m_freeMode(freeMode)
    , m_markedSpace(newSpace)
{
}

inline void Free::operator()(MarkedBlock* block)
{
    if (m_freeMode == FreeOrShrink)
        m_markedSpace->freeOrShrinkBlock(block);
    else
        m_markedSpace->freeBlock(block);
}

inline Free::ReturnType Free::returnValue()
{
    return m_blocks.head();
}

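// Per-block functors that forward weak-set visiting and reaping to MarkedBlock.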
struct VisitWeakSet : MarkedBlock::VoidFunctor {
    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
private:
    HeapRootVisitor& m_heapRootVisitor;
};

struct ReapWeakSet : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
};

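// Sets up one MarkedAllocator per size class (precise steps up to preciseCutoff,
// imprecise steps up to impreciseCutoff) for each of the three destructor kinds,
// plus a "large" allocator per kind (cell size 0 here), presumably backing
// allocations bigger than the imprecise cutoff.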
MarkedSpace::MarkedSpace(Heap* heap)
    : m_heap(heap)
    , m_capacity(0)
    , m_isIterating(false)
    , m_currentDelayedReleaseScope(nullptr)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
}

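// Tears down the space by freeing every block; the block set must be empty afterwards.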
MarkedSpace::~MarkedSpace()
{
    Free free(Free::FreeAll, this);
    forEachBlock(free);
    ASSERT(!m_blocks.set().size());
}

struct LastChanceToFinalize {
    void operator()(MarkedAllocator& allocator) { allocator.lastChanceToFinalize(); }
};

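// Stops allocation and gives every allocator a last chance to finalize its cells,
// holding a DelayedReleaseScope for the duration; presumably run at VM teardown.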
void MarkedSpace::lastChanceToFinalize()
{
    DelayedReleaseScope delayedReleaseScope(*this);
    stopAllocating();
    forEachAllocator<LastChanceToFinalize>();
}

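// Eagerly sweeps every block; the incremental sweeper is notified first
// (willFinishSweeping), presumably so it can stand down.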
void MarkedSpace::sweep()
{
    if (Options::logGC())
        dataLog("Eagerly sweeping...");
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<Sweep>();
}

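// Like sweep(), but runs the ZombifySweep functor over every block; presumably a
// debugging mode that turns dead cells into zombies rather than recycling them.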
void MarkedSpace::zombifySweep()
{
    if (Options::logGC())
        dataLog("Zombifying sweep...");
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<ZombifySweep>();
}

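// Resets every allocator (all size classes plus the large allocators); with GGC
// enabled, also forgets which blocks received new objects.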
void MarkedSpace::resetAllocators()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    m_normalSpace.largeAllocator.reset();
    m_normalDestructorSpace.largeAllocator.reset();
    m_immortalStructureDestructorSpace.largeAllocator.reset();

#if ENABLE(GGC)
    m_blocksWithNewObjects.clear();
#endif
}

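// visitWeakSets() and reapWeakSets() below share a pattern: an Eden collection
// only touches blocks that received new objects since the last collection, while
// a full collection walks every block.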
void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
{
    VisitWeakSet visitWeakSet(heapRootVisitor);
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            visitWeakSet(m_blocksWithNewObjects[i]);
    } else
        forEachBlock(visitWeakSet);
}

void MarkedSpace::reapWeakSets()
{
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            m_blocksWithNewObjects[i]->reapWeakSet();
    } else
        forEachBlock<ReapWeakSet>();
}

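// Applies a functor to every allocator: each precise and imprecise size class in
// all three destructor kinds, then the three large allocators.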
template <typename Functor>
void MarkedSpace::forEachAllocator()
{
    Functor functor;
    forEachAllocator(functor);
}

template <typename Functor>
void MarkedSpace::forEachAllocator(Functor& functor)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        functor(allocatorFor(cellSize));
        functor(normalDestructorAllocatorFor(cellSize));
        functor(immortalStructureDestructorAllocatorFor(cellSize));
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        functor(allocatorFor(cellSize));
        functor(normalDestructorAllocatorFor(cellSize));
        functor(immortalStructureDestructorAllocatorFor(cellSize));
    }

    functor(m_normalSpace.largeAllocator);
    functor(m_normalDestructorSpace.largeAllocator);
    functor(m_immortalStructureDestructorSpace.largeAllocator);
}

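// stopAllocating()/resumeAllocating() bracket heap iteration (see
// willStartIterating()/didFinishIterating() at the bottom of this file).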
struct StopAllocatingFunctor {
    void operator()(MarkedAllocator& allocator) { allocator.stopAllocating(); }
};

void MarkedSpace::stopAllocating()
{
    ASSERT(!isIterating());
    forEachAllocator<StopAllocatingFunctor>();
}

struct ResumeAllocatingFunctor {
    void operator()(MarkedAllocator& allocator) { allocator.resumeAllocating(); }
};

void MarkedSpace::resumeAllocating()
{
    ASSERT(isIterating());
    forEachAllocator<ResumeAllocatingFunctor>();
}

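// Returns true as soon as any allocator reports that its memory is paged out
// relative to the given deadline.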
bool MarkedSpace::isPagedOut(double deadline)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
        return true;

    return false;
}

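// Detaches a block from its allocator and from the block set, adjusts the
// recorded capacity, and hands the memory back to the heap's block allocator
// (oversize blocks take the custom-size path).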
void MarkedSpace::freeBlock(MarkedBlock* block)
{
    block->allocator()->removeBlock(block);
    m_capacity -= block->capacity();
    m_blocks.remove(block);
    if (block->capacity() == MarkedBlock::blockSize) {
        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
        return;
    }
    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
}

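// Frees a block outright if it is empty; otherwise just shrinks it.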
void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
    if (!block->isEmpty()) {
        block->shrink();
        return;
    }

    freeBlock(block);
}

struct Shrink : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->shrink(); }
};

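// Shrinks the space by freeing empty blocks and shrinking the rest, via the
// FreeOrShrink mode of the Free functor above.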
void MarkedSpace::shrink()
{
    Free freeOrShrink(Free::FreeOrShrink, this);
    forEachBlock(freeOrShrink);
}

static void clearNewlyAllocatedInBlock(MarkedBlock* block)
{
    if (!block)
        return;
    block->clearNewlyAllocated();
}

struct ClearNewlyAllocated : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->clearNewlyAllocated(); }
};

#ifndef NDEBUG
struct VerifyNewlyAllocated : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { ASSERT(!block->clearNewlyAllocated()); }
};
#endif

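// Clears m_newlyAllocated bits. For the sized allocators only the last active
// block is cleared, which suggests only that block can still carry the bitmap;
// the large allocators are walked block by block (see the comment below).
// Debug builds then verify that no block still has the bitmap.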
void MarkedSpace::clearNewlyAllocated()
{
    for (size_t i = 0; i < preciseCount; ++i) {
        clearNewlyAllocatedInBlock(m_normalSpace.preciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_normalDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.preciseAllocators[i].takeLastActiveBlock());
    }

    for (size_t i = 0; i < impreciseCount; ++i) {
        clearNewlyAllocatedInBlock(m_normalSpace.impreciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_normalDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
        clearNewlyAllocatedInBlock(m_immortalStructureDestructorSpace.impreciseAllocators[i].takeLastActiveBlock());
    }

    // We have to iterate all of the blocks in the large allocators because they are
    // canonicalized as they are used up (see MarkedAllocator::tryAllocateHelper)
    // which creates the m_newlyAllocated bitmap.
    ClearNewlyAllocated functor;
    m_normalSpace.largeAllocator.forEachBlock(functor);
    m_normalDestructorSpace.largeAllocator.forEachBlock(functor);
    m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor);

#ifndef NDEBUG
    VerifyNewlyAllocated verifyFunctor;
    forEachBlock(verifyFunctor);
#endif
}

#ifndef NDEBUG
struct VerifyMarkedOrRetired : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block)
    {
        switch (block->m_state) {
        case MarkedBlock::Marked:
        case MarkedBlock::Retired:
            return;
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }
};
#endif

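// Clears mark bits: an Eden collection clears only blocks that received new
// objects, a full collection clears every block. Debug builds then assert each
// block is left in the Marked or Retired state.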
void MarkedSpace::clearMarks()
{
    if (m_heap->operationInProgress() == EdenCollection) {
        for (unsigned i = 0; i < m_blocksWithNewObjects.size(); ++i)
            m_blocksWithNewObjects[i]->clearMarks();
    } else
        forEachBlock<ClearMarks>();

#ifndef NDEBUG
    VerifyMarkedOrRetired verifyFunctor;
    forEachBlock(verifyFunctor);
#endif
}

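// Iterating the heap requires allocation to be stopped; these two calls toggle
// m_isIterating around stopAllocating()/resumeAllocating().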
void MarkedSpace::willStartIterating()
{
    ASSERT(!isIterating());
    stopAllocating();
    m_isIterating = true;
}

void MarkedSpace::didFinishIterating()
{
    ASSERT(isIterating());
    DelayedReleaseScope scope(*this);
    resumeAllocating();
    m_isIterating = false;
}

} // namespace JSC