// Retrieved from the git.saurik.com mirror of apple/javascriptcore.git
// (tag JavaScriptCore-7601.1.46.3), file heap/MarkedSpace.h.
/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2011 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */
21
22 #ifndef MarkedSpace_h
23 #define MarkedSpace_h
24
25 #include "MachineStackMarker.h"
26 #include "MarkedAllocator.h"
27 #include "MarkedBlock.h"
28 #include "MarkedBlockSet.h"
29 #include <array>
30 #include <wtf/Bitmap.h>
31 #include <wtf/DoublyLinkedList.h>
32 #include <wtf/HashSet.h>
33 #include <wtf/Noncopyable.h>
34 #include <wtf/RetainPtr.h>
35 #include <wtf/Vector.h>
36
37 namespace JSC {
38
39 class Heap;
40 class HeapIterationScope;
41 class JSCell;
42 class LiveObjectIterator;
43 class LLIntOffsetsExtractor;
44 class WeakGCHandle;
45 class SlotVisitor;
46
47 struct ClearMarks : MarkedBlock::VoidFunctor {
48 void operator()(MarkedBlock* block)
49 {
50 block->clearMarks();
51 }
52 };
53
54 struct Sweep : MarkedBlock::VoidFunctor {
55 void operator()(MarkedBlock* block) { block->sweep(); }
56 };
57
58 struct ZombifySweep : MarkedBlock::VoidFunctor {
59 void operator()(MarkedBlock* block)
60 {
61 if (block->needsSweeping())
62 block->sweep();
63 }
64 };
65
66 struct MarkCount : MarkedBlock::CountFunctor {
67 void operator()(MarkedBlock* block) { count(block->markCount()); }
68 };
69
70 struct Size : MarkedBlock::CountFunctor {
71 void operator()(MarkedBlock* block) { count(block->markCount() * block->cellSize()); }
72 };
73
// MarkedSpace is the marked-object portion of the garbage-collected heap.
// Allocations are segregated two ways: by size class (precise steps up to
// preciseCutoff, coarser imprecise steps up to impreciseCutoff, then a single
// large allocator) and by whether the cell type requires a destructor call at
// sweep time (m_destructorSpace vs. m_normalSpace).
class MarkedSpace {
    WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
    // Precise size classes, one per atomSize step.
    // [ 32... 128 ]
    static const size_t preciseStep = MarkedBlock::atomSize;
    static const size_t preciseCutoff = 128;
    static const size_t preciseCount = preciseCutoff / preciseStep;

    // Imprecise size classes, coarser steps above the precise range.
    // [ 1024... blockSize ]
    static const size_t impreciseStep = 2 * preciseCutoff;
    static const size_t impreciseCutoff = MarkedBlock::blockSize / 2;
    static const size_t impreciseCount = impreciseCutoff / impreciseStep;

    // One allocator per size class; requests above impreciseCutoff fall
    // through to largeAllocator (see allocatorFor/destructorAllocatorFor).
    struct Subspace {
        std::array<MarkedAllocator, preciseCount> preciseAllocators;
        std::array<MarkedAllocator, impreciseCount> impreciseAllocators;
        MarkedAllocator largeAllocator;
    };

    MarkedSpace(Heap*);
    ~MarkedSpace();
    void lastChanceToFinalize();

    // Allocator selection by request size; the *WithDestructor variants route
    // through m_destructorSpace, the others through m_normalSpace.
    MarkedAllocator& firstAllocator();
    MarkedAllocator& allocatorFor(size_t);
    MarkedAllocator& destructorAllocatorFor(size_t);
    void* allocateWithDestructor(size_t);
    void* allocateWithoutDestructor(size_t);

    Subspace& subspaceForObjectsWithDestructor() { return m_destructorSpace; }
    Subspace& subspaceForObjectsWithoutDestructor() { return m_normalSpace; }

    void resetAllocators();

    void visitWeakSets(HeapRootVisitor&);
    void reapWeakSets();

    MarkedBlockSet& blocks() { return m_blocks; }

    // Iteration bracket: forEachLiveCell/forEachDeadCell assert that
    // willStartIterating() has been called (isIterating() is true).
    void willStartIterating();
    bool isIterating() { return m_isIterating; }
    void didFinishIterating();

    void stopAllocating();
    void resumeAllocating(); // If we just stopped allocation but we didn't do a collection, we need to resume allocation.

    typedef HashSet<MarkedBlock*>::iterator BlockIterator;

    // Cell/block visitors. The Functor& overloads use the caller's functor;
    // the no-argument overloads default-construct one and return its
    // returnValue(). Cell iteration stops early on IterationStatus::Done.
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachBlock(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachBlock();

    void shrink();
    void freeBlock(MarkedBlock*);
    void freeOrShrinkBlock(MarkedBlock*);

    // Bookkeeping hooks invoked as blocks are created/consumed/allocated into.
    void didAddBlock(MarkedBlock*);
    void didConsumeFreeList(MarkedBlock*);
    void didAllocateInBlock(MarkedBlock*);

    void clearMarks();
    void clearNewlyAllocated();
    void sweep();
    void zombifySweep();
    size_t objectCount(); // Marked-cell count, computed by walking all blocks.
    size_t size();        // Marked bytes, computed by walking all blocks.
    size_t capacity();    // Cached total block capacity (m_capacity).

    bool isPagedOut(double deadline);

#if USE(CF)
    template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif

    const Vector<MarkedBlock*>& blocksWithNewObjects() const { return m_blocksWithNewObjects; }

private:
    friend class LLIntOffsetsExtractor;
    friend class JIT;

    template<typename Functor> void forEachAllocator(Functor&);
    template<typename Functor> void forEachAllocator();

    Subspace m_destructorSpace; // Size classes for cells needing destruction.
    Subspace m_normalSpace;     // Size classes for destructor-free cells.

    Heap* m_heap;
    size_t m_capacity; // Accumulated in didAddBlock from each block's capacity().
    bool m_isIterating;
    MarkedBlockSet m_blocks;
    Vector<MarkedBlock*> m_blocksWithNewObjects; // Only appended under ENABLE(GGC).
};
169
170 template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope&, Functor& functor)
171 {
172 ASSERT(isIterating());
173 BlockIterator end = m_blocks.set().end();
174 for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) {
175 if ((*it)->forEachLiveCell(functor) == IterationStatus::Done)
176 break;
177 }
178 return functor.returnValue();
179 }
180
181 template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope& scope)
182 {
183 Functor functor;
184 return forEachLiveCell(scope, functor);
185 }
186
187 template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope&, Functor& functor)
188 {
189 ASSERT(isIterating());
190 BlockIterator end = m_blocks.set().end();
191 for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) {
192 if ((*it)->forEachDeadCell(functor) == IterationStatus::Done)
193 break;
194 }
195 return functor.returnValue();
196 }
197
198 template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope& scope)
199 {
200 Functor functor;
201 return forEachDeadCell(scope, functor);
202 }
203
204 inline MarkedAllocator& MarkedSpace::allocatorFor(size_t bytes)
205 {
206 ASSERT(bytes);
207 if (bytes <= preciseCutoff)
208 return m_normalSpace.preciseAllocators[(bytes - 1) / preciseStep];
209 if (bytes <= impreciseCutoff)
210 return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep];
211 return m_normalSpace.largeAllocator;
212 }
213
214 inline MarkedAllocator& MarkedSpace::destructorAllocatorFor(size_t bytes)
215 {
216 ASSERT(bytes);
217 if (bytes <= preciseCutoff)
218 return m_destructorSpace.preciseAllocators[(bytes - 1) / preciseStep];
219 if (bytes <= impreciseCutoff)
220 return m_destructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep];
221 return m_destructorSpace.largeAllocator;
222 }
223
224 inline void* MarkedSpace::allocateWithoutDestructor(size_t bytes)
225 {
226 return allocatorFor(bytes).allocate(bytes);
227 }
228
229 inline void* MarkedSpace::allocateWithDestructor(size_t bytes)
230 {
231 return destructorAllocatorFor(bytes).allocate(bytes);
232 }
233
234 template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock(Functor& functor)
235 {
236 for (size_t i = 0; i < preciseCount; ++i)
237 m_normalSpace.preciseAllocators[i].forEachBlock(functor);
238 for (size_t i = 0; i < impreciseCount; ++i)
239 m_normalSpace.impreciseAllocators[i].forEachBlock(functor);
240 m_normalSpace.largeAllocator.forEachBlock(functor);
241
242 for (size_t i = 0; i < preciseCount; ++i)
243 m_destructorSpace.preciseAllocators[i].forEachBlock(functor);
244 for (size_t i = 0; i < impreciseCount; ++i)
245 m_destructorSpace.impreciseAllocators[i].forEachBlock(functor);
246 m_destructorSpace.largeAllocator.forEachBlock(functor);
247
248 return functor.returnValue();
249 }
250
251 template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock()
252 {
253 Functor functor;
254 return forEachBlock(functor);
255 }
256
257 inline void MarkedSpace::didAddBlock(MarkedBlock* block)
258 {
259 m_capacity += block->capacity();
260 m_blocks.add(block);
261 }
262
// Records that a new object was allocated in |block|. Only generational GC
// (ENABLE(GGC)) needs the list of blocks with new objects; otherwise the
// parameter is deliberately unused.
inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block)
{
#if ENABLE(GGC)
    m_blocksWithNewObjects.append(block);
#else
    UNUSED_PARAM(block);
#endif
}
271
// Returns the number of marked cells heap-wide, by summing each block's
// markCount() via the MarkCount functor. O(number of blocks).
inline size_t MarkedSpace::objectCount()
{
    return forEachBlock<MarkCount>();
}
276
// Returns the number of marked bytes heap-wide, by summing
// markCount() * cellSize() per block via the Size functor. O(number of blocks).
inline size_t MarkedSpace::size()
{
    return forEachBlock<Size>();
}
281
// Returns the cached total capacity of all blocks, maintained by didAddBlock;
// no block walk is required.
inline size_t MarkedSpace::capacity()
{
    return m_capacity;
}
286
287 } // namespace JSC
288
289 #endif // MarkedSpace_h