/*
 *  Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 *  Copyright (C) 2001 Peter Kelly (pmk@post.com)
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2011 Apple Inc. All rights reserved.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */
21 | ||
22 | #ifndef MarkedSpace_h | |
23 | #define MarkedSpace_h | |
24 | ||
25 | #include "MachineStackMarker.h" | |
26 | #include "MarkedAllocator.h" | |
27 | #include "MarkedBlock.h" | |
28 | #include "MarkedBlockSet.h" | |
29 | #include <array> | |
30 | #include <wtf/PageAllocationAligned.h> | |
31 | #include <wtf/Bitmap.h> | |
32 | #include <wtf/DoublyLinkedList.h> | |
33 | #include <wtf/HashSet.h> | |
34 | #include <wtf/Noncopyable.h> | |
35 | #include <wtf/Vector.h> | |
36 | ||
37 | namespace JSC { | |
38 | ||
39 | class DelayedReleaseScope; | |
40 | class Heap; | |
41 | class HeapIterationScope; | |
42 | class JSCell; | |
43 | class LiveObjectIterator; | |
44 | class LLIntOffsetsExtractor; | |
45 | class WeakGCHandle; | |
46 | class SlotVisitor; | |
47 | ||
48 | struct ClearMarks : MarkedBlock::VoidFunctor { | |
49 | void operator()(MarkedBlock* block) | |
50 | { | |
51 | block->clearMarks(); | |
52 | } | |
53 | }; | |
54 | ||
55 | struct ClearRememberedSet : MarkedBlock::VoidFunctor { | |
56 | void operator()(MarkedBlock* block) | |
57 | { | |
58 | block->clearRememberedSet(); | |
59 | } | |
60 | }; | |
61 | ||
62 | struct Sweep : MarkedBlock::VoidFunctor { | |
63 | void operator()(MarkedBlock* block) { block->sweep(); } | |
64 | }; | |
65 | ||
66 | struct ZombifySweep : MarkedBlock::VoidFunctor { | |
67 | void operator()(MarkedBlock* block) | |
68 | { | |
69 | if (block->needsSweeping()) | |
70 | block->sweep(); | |
71 | } | |
72 | }; | |
73 | ||
74 | struct MarkCount : MarkedBlock::CountFunctor { | |
75 | void operator()(MarkedBlock* block) { count(block->markCount()); } | |
76 | }; | |
77 | ||
78 | struct Size : MarkedBlock::CountFunctor { | |
79 | void operator()(MarkedBlock* block) { count(block->markCount() * block->cellSize()); } | |
80 | }; | |
81 | ||
// MarkedSpace owns the heap's marked blocks, partitioned into three
// subspaces by destructor behavior (none, normal, immortal-structure),
// each with size-segregated allocators. It provides allocation entry
// points and whole-heap iteration/sweeping helpers.
class MarkedSpace {
    WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
    MarkedSpace(Heap*);
    ~MarkedSpace();
    void lastChanceToFinalize();

    // Allocator lookup: picks the size class for a request of `bytes`
    // within the appropriate subspace.
    MarkedAllocator& firstAllocator();
    MarkedAllocator& allocatorFor(size_t);
    MarkedAllocator& immortalStructureDestructorAllocatorFor(size_t);
    MarkedAllocator& normalDestructorAllocatorFor(size_t);
    void* allocateWithNormalDestructor(size_t);
    void* allocateWithImmortalStructureDestructor(size_t);
    void* allocateWithoutDestructor(size_t);

    void resetAllocators();

    void visitWeakSets(HeapRootVisitor&);
    void reapWeakSets();

    MarkedBlockSet& blocks() { return m_blocks; }

    // Iteration guard: willStartIterating()/didFinishIterating() bracket
    // heap iteration; isIterating() is asserted by the forEach* helpers.
    void willStartIterating();
    bool isIterating() { return m_isIterating; }
    void didFinishIterating();

    void stopAllocating();
    void resumeAllocating(); // If we just stopped allocation but we didn't do a collection, we need to resume allocation.

    typedef HashSet<MarkedBlock*>::iterator BlockIterator;

    // Per-cell and per-block iteration. The Functor's returnValue() is
    // propagated back to the caller.
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachBlock(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachBlock();

    void shrink();
    void freeBlock(MarkedBlock*);
    void freeOrShrinkBlock(MarkedBlock*);

    void didAddBlock(MarkedBlock*);
    void didConsumeFreeList(MarkedBlock*);
    void didAllocateInBlock(MarkedBlock*);

    void clearMarks();
    void clearRememberedSet();
    void clearNewlyAllocated();
    void sweep();
    void zombifySweep();
    size_t objectCount();
    size_t size();
    size_t capacity();

    bool isPagedOut(double deadline);

#if USE(CF)
    template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif

private:
    friend class DelayedReleaseScope;
    friend class LLIntOffsetsExtractor;

    template<typename Functor> void forEachAllocator(Functor&);
    template<typename Functor> void forEachAllocator();

    // Precise size classes: one allocator per atomSize step, covering
    // requests up to preciseCutoff bytes.
    // NOTE(review): the old "[ 32... 128 ]" range comment did not match
    // these constants — verify against MarkedBlock::atomSize.
    static const size_t preciseStep = MarkedBlock::atomSize;
    static const size_t preciseCutoff = 128;
    static const size_t preciseCount = preciseCutoff / preciseStep;

    // Imprecise size classes: coarser steps (2 * preciseCutoff bytes each)
    // covering requests from preciseCutoff + 1 up to impreciseCutoff;
    // anything larger goes to largeAllocator.
    // NOTE(review): the old "[ 1024... blockSize ]" range comment did not
    // match these constants — verify intent.
    static const size_t impreciseStep = 2 * preciseCutoff;
    static const size_t impreciseCutoff = MarkedBlock::blockSize / 2;
    static const size_t impreciseCount = impreciseCutoff / impreciseStep;

    // One complete set of size-segregated allocators.
    struct Subspace {
        std::array<MarkedAllocator, preciseCount> preciseAllocators;
        std::array<MarkedAllocator, impreciseCount> impreciseAllocators;
        MarkedAllocator largeAllocator;
    };

    Subspace m_normalDestructorSpace;
    Subspace m_immortalStructureDestructorSpace;
    Subspace m_normalSpace;

    Heap* m_heap;
    size_t m_capacity;      // Sum of capacities of all blocks ever added (see didAddBlock()).
    bool m_isIterating;
    MarkedBlockSet m_blocks;
    Vector<MarkedBlock*> m_blocksWithNewObjects; // Only appended to when GGC is enabled.

    DelayedReleaseScope* m_currentDelayedReleaseScope;
};
178 | ||
179 | template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope&, Functor& functor) | |
180 | { | |
181 | ASSERT(isIterating()); | |
182 | BlockIterator end = m_blocks.set().end(); | |
183 | for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) | |
184 | (*it)->forEachLiveCell(functor); | |
185 | return functor.returnValue(); | |
186 | } | |
187 | ||
188 | template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope& scope) | |
189 | { | |
190 | Functor functor; | |
191 | return forEachLiveCell(scope, functor); | |
192 | } | |
193 | ||
194 | template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope&, Functor& functor) | |
195 | { | |
196 | ASSERT(isIterating()); | |
197 | BlockIterator end = m_blocks.set().end(); | |
198 | for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) | |
199 | (*it)->forEachDeadCell(functor); | |
200 | return functor.returnValue(); | |
201 | } | |
202 | ||
203 | template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope& scope) | |
204 | { | |
205 | Functor functor; | |
206 | return forEachDeadCell(scope, functor); | |
207 | } | |
208 | ||
209 | inline MarkedAllocator& MarkedSpace::allocatorFor(size_t bytes) | |
210 | { | |
211 | ASSERT(bytes); | |
212 | if (bytes <= preciseCutoff) | |
213 | return m_normalSpace.preciseAllocators[(bytes - 1) / preciseStep]; | |
214 | if (bytes <= impreciseCutoff) | |
215 | return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; | |
216 | return m_normalSpace.largeAllocator; | |
217 | } | |
218 | ||
219 | inline MarkedAllocator& MarkedSpace::immortalStructureDestructorAllocatorFor(size_t bytes) | |
220 | { | |
221 | ASSERT(bytes); | |
222 | if (bytes <= preciseCutoff) | |
223 | return m_immortalStructureDestructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; | |
224 | if (bytes <= impreciseCutoff) | |
225 | return m_immortalStructureDestructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; | |
226 | return m_immortalStructureDestructorSpace.largeAllocator; | |
227 | } | |
228 | ||
229 | inline MarkedAllocator& MarkedSpace::normalDestructorAllocatorFor(size_t bytes) | |
230 | { | |
231 | ASSERT(bytes); | |
232 | if (bytes <= preciseCutoff) | |
233 | return m_normalDestructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; | |
234 | if (bytes <= impreciseCutoff) | |
235 | return m_normalDestructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; | |
236 | return m_normalDestructorSpace.largeAllocator; | |
237 | } | |
238 | ||
239 | inline void* MarkedSpace::allocateWithoutDestructor(size_t bytes) | |
240 | { | |
241 | return allocatorFor(bytes).allocate(bytes); | |
242 | } | |
243 | ||
244 | inline void* MarkedSpace::allocateWithImmortalStructureDestructor(size_t bytes) | |
245 | { | |
246 | return immortalStructureDestructorAllocatorFor(bytes).allocate(bytes); | |
247 | } | |
248 | ||
249 | inline void* MarkedSpace::allocateWithNormalDestructor(size_t bytes) | |
250 | { | |
251 | return normalDestructorAllocatorFor(bytes).allocate(bytes); | |
252 | } | |
253 | ||
254 | template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock(Functor& functor) | |
255 | { | |
256 | for (size_t i = 0; i < preciseCount; ++i) | |
257 | m_normalSpace.preciseAllocators[i].forEachBlock(functor); | |
258 | for (size_t i = 0; i < impreciseCount; ++i) | |
259 | m_normalSpace.impreciseAllocators[i].forEachBlock(functor); | |
260 | m_normalSpace.largeAllocator.forEachBlock(functor); | |
261 | ||
262 | for (size_t i = 0; i < preciseCount; ++i) | |
263 | m_normalDestructorSpace.preciseAllocators[i].forEachBlock(functor); | |
264 | for (size_t i = 0; i < impreciseCount; ++i) | |
265 | m_normalDestructorSpace.impreciseAllocators[i].forEachBlock(functor); | |
266 | m_normalDestructorSpace.largeAllocator.forEachBlock(functor); | |
267 | ||
268 | for (size_t i = 0; i < preciseCount; ++i) | |
269 | m_immortalStructureDestructorSpace.preciseAllocators[i].forEachBlock(functor); | |
270 | for (size_t i = 0; i < impreciseCount; ++i) | |
271 | m_immortalStructureDestructorSpace.impreciseAllocators[i].forEachBlock(functor); | |
272 | m_immortalStructureDestructorSpace.largeAllocator.forEachBlock(functor); | |
273 | ||
274 | return functor.returnValue(); | |
275 | } | |
276 | ||
277 | template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock() | |
278 | { | |
279 | Functor functor; | |
280 | return forEachBlock(functor); | |
281 | } | |
282 | ||
283 | inline void MarkedSpace::didAddBlock(MarkedBlock* block) | |
284 | { | |
285 | m_capacity += block->capacity(); | |
286 | m_blocks.add(block); | |
287 | } | |
288 | ||
// Records that `block` received new objects, so it can be revisited later
// (presumably by the generational collector when GGC is enabled — confirm
// against the consumers of m_blocksWithNewObjects). No-op without GGC.
inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block)
{
#if ENABLE(GGC)
    m_blocksWithNewObjects.append(block);
#else
    UNUSED_PARAM(block);
#endif
}
297 | ||
// Clears the remembered-set bits of every block in the space (applies the
// ClearRememberedSet functor to each block).
inline void MarkedSpace::clearRememberedSet()
{
    forEachBlock<ClearRememberedSet>();
}
302 | ||
// Returns the total number of marked cells across all blocks
// (accumulated by the MarkCount functor).
inline size_t MarkedSpace::objectCount()
{
    return forEachBlock<MarkCount>();
}
307 | ||
// Returns the total byte size of marked cells across all blocks
// (accumulated by the Size functor as markCount * cellSize per block).
inline size_t MarkedSpace::size()
{
    return forEachBlock<Size>();
}
312 | ||
// Returns the running capacity total maintained by didAddBlock().
inline size_t MarkedSpace::capacity()
{
    return m_capacity;
}
317 | ||
318 | } // namespace JSC | |
319 | ||
320 | #endif // MarkedSpace_h |