/*
 * Copyright (C) 2009, 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef MarkStack_h
#define MarkStack_h

#include "CopiedSpace.h"
#include "HandleTypes.h"
#include "JSValue.h"
#include "Options.h"
#include "Register.h"
#include "UnconditionalFinalizer.h"
#include "VTableSpectrum.h"
#include "WeakReferenceHarvester.h"
#include <wtf/HashMap.h>
#include <wtf/HashSet.h>
#include <wtf/Noncopyable.h>
#include <wtf/OSAllocator.h>
#include <wtf/PageBlock.h>
#include <wtf/Vector.h>

namespace JSC {

class ConservativeRoots;
class JSGlobalData;
class MarkStack;
class ParallelModeEnabler;
class Register;
class SlotVisitor;
template<typename T> class WriteBarrierBase;
template<typename T> class JITWriteBarrier;

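// A MarkStackSegment is a chunk of mark stack storage: this header is laid
// out immediately before an array of cell pointers, which is why data()
// returns the address one past the header (this + 1). For example (an
// illustration only, not the configured segment size), a 4KB segment holds
// capacityFromSize(4096) = (4096 - sizeof(MarkStackSegment)) / sizeof(const JSCell*)
// entries, and sizeFromCapacity() inverts that computation.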
struct MarkStackSegment {
    MarkStackSegment* m_previous;
#if !ASSERT_DISABLED
    size_t m_top;
#endif

    const JSCell** data()
    {
        return bitwise_cast<const JSCell**>(this + 1);
    }

    static size_t capacityFromSize(size_t size)
    {
        return (size - sizeof(MarkStackSegment)) / sizeof(const JSCell*);
    }

    static size_t sizeFromCapacity(size_t capacity)
    {
        return sizeof(MarkStackSegment) + capacity * sizeof(const JSCell*);
    }
};

class MarkStackSegmentAllocator {
public:
    MarkStackSegmentAllocator();
    ~MarkStackSegmentAllocator();

    MarkStackSegment* allocate();
    void release(MarkStackSegment*);

    void shrinkReserve();

private:
    Mutex m_lock;
    MarkStackSegment* m_nextFreeSegment;
};

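// MarkStackArray is a segmented LIFO stack of cells awaiting a visit. The
// top segment is used as a plain array; when it fills, expand() chains a new
// segment in front of it. Donation and stealing let parallel markers move
// cells between their local stacks and the shared stack.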
class MarkStackArray {
public:
    MarkStackArray(MarkStackSegmentAllocator&);
    ~MarkStackArray();

    void append(const JSCell*);

    bool canRemoveLast();
    const JSCell* removeLast();
    bool refill();

    bool isEmpty();

    bool canDonateSomeCells(); // Returns false if you should definitely not call donateSomeCellsTo().
    bool donateSomeCellsTo(MarkStackArray& other); // Returns true if some cells were donated.

    void stealSomeCellsFrom(MarkStackArray& other);

    size_t size();

private:
    MarkStackSegment* m_topSegment;

    JS_EXPORT_PRIVATE void expand();

    MarkStackSegmentAllocator& m_allocator;

    size_t m_segmentCapacity;
    size_t m_top;
    size_t m_numberOfPreviousSegments;

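    // In debug builds each segment mirrors the logical top in its own m_top
    // field so the assertions below can cross-check the cached value; these
    // helpers keep the two counters in sync.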
    size_t postIncTop()
    {
        size_t result = m_top++;
        ASSERT(result == m_topSegment->m_top++);
        return result;
    }

    size_t preDecTop()
    {
        size_t result = --m_top;
        ASSERT(result == --m_topSegment->m_top);
        return result;
    }

    void setTopForFullSegment()
    {
        ASSERT(m_topSegment->m_top == m_segmentCapacity);
        m_top = m_segmentCapacity;
    }

    void setTopForEmptySegment()
    {
        ASSERT(!m_topSegment->m_top);
        m_top = 0;
    }

    size_t top()
    {
        ASSERT(m_top == m_topSegment->m_top);
        return m_top;
    }

#if ASSERT_DISABLED
    void validatePrevious() { }
#else
    void validatePrevious()
    {
        unsigned count = 0;
        for (MarkStackSegment* current = m_topSegment->m_previous; current; current = current->m_previous)
            count++;
        ASSERT(count == m_numberOfPreviousSegments);
    }
#endif
};

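// Example use (a hypothetical sketch; in practice MarkStackArray is driven
// by MarkStack/SlotVisitor rather than used directly):
//
//     MarkStackSegmentAllocator allocator;
//     MarkStackArray stack(allocator);
//     stack.append(cell);
//     while (stack.canRemoveLast())
//         visit(stack.removeLast());

// MarkStackThreadSharedData holds the state shared by every marking thread
// during a collection: the shared mark stack that work is donated to and
// stolen from, the merged opaque root set, and the lock/condition pair used
// to coordinate the parallel markers.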
class MarkStackThreadSharedData {
public:
    MarkStackThreadSharedData(JSGlobalData*);
    ~MarkStackThreadSharedData();

    void reset();

private:
    friend class MarkStack;
    friend class SlotVisitor;

#if ENABLE(PARALLEL_GC)
    void markingThreadMain();
    static void markingThreadStartFunc(void* heap);
#endif

    JSGlobalData* m_globalData;
    CopiedSpace* m_copiedSpace;

    MarkStackSegmentAllocator m_segmentAllocator;

    Vector<ThreadIdentifier> m_markingThreads;

    Mutex m_markingLock;
    ThreadCondition m_markingCondition;
    MarkStackArray m_sharedMarkStack;
    unsigned m_numberOfActiveParallelMarkers;
    bool m_parallelMarkersShouldExit;

    Mutex m_opaqueRootsLock;
    HashSet<void*> m_opaqueRoots;

    ListableHandler<WeakReferenceHarvester>::List m_weakReferenceHarvesters;
    ListableHandler<UnconditionalFinalizer>::List m_unconditionalFinalizers;
};

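// MarkStack is a per-thread marking context. It owns a local segmented stack
// of cells to visit and a local opaque root set, merging the latter into the
// shared set when marking in parallel.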
class MarkStack {
    WTF_MAKE_NONCOPYABLE(MarkStack);
    friend class HeapRootVisitor; // Allowed to mark a JSValue* or JSCell** directly.

public:
    MarkStack(MarkStackThreadSharedData&);
    ~MarkStack();

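    // The typed append() overloads below take barriered slots
    // (JITWriteBarrier/WriteBarrierBase); appendUnbarrieredPointer() and the
    // ConservativeRoots overload cover raw pointers with no write barrier.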
    void append(ConservativeRoots&);

    template<typename T> void append(JITWriteBarrier<T>*);
    template<typename T> void append(WriteBarrierBase<T>*);
    void appendValues(WriteBarrierBase<Unknown>*, size_t count);

    template<typename T>
    void appendUnbarrieredPointer(T**);

    void addOpaqueRoot(void*);
    bool containsOpaqueRoot(void*);
    int opaqueRootCount();

    bool isEmpty() { return m_stack.isEmpty(); }

    void reset();

    size_t visitCount() const { return m_visitCount; }

#if ENABLE(SIMPLE_HEAP_PROFILING)
    VTableSpectrum m_visitedTypeCounts;
#endif

    void addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
    {
        m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester);
    }

    void addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)
    {
        m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer);
    }

protected:
    JS_EXPORT_PRIVATE static void validate(JSCell*);

    void append(JSValue*);
    void append(JSValue*, size_t count);
    void append(JSCell**);

    void internalAppend(JSCell*);
    void internalAppend(JSValue);

    JS_EXPORT_PRIVATE void mergeOpaqueRoots();

    void mergeOpaqueRootsIfNecessary()
    {
        if (m_opaqueRoots.isEmpty())
            return;
        mergeOpaqueRoots();
    }

    void mergeOpaqueRootsIfProfitable()
    {
        if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold)
            return;
        mergeOpaqueRoots();
    }

    MarkStackArray m_stack;
    HashSet<void*> m_opaqueRoots; // Handle-owning data structures not visible to the garbage collector.

#if !ASSERT_DISABLED
public:
    bool m_isCheckingForDefaultMarkViolation;
    bool m_isDraining;
#endif
protected:
    friend class ParallelModeEnabler;

    size_t m_visitCount;
    bool m_isInParallelMode;

    MarkStackThreadSharedData& m_shared;
};

inline MarkStack::MarkStack(MarkStackThreadSharedData& shared)
    : m_stack(shared.m_segmentAllocator)
#if !ASSERT_DISABLED
    , m_isCheckingForDefaultMarkViolation(false)
    , m_isDraining(false)
#endif
    , m_visitCount(0)
    , m_isInParallelMode(false)
    , m_shared(shared)
{
}

inline MarkStack::~MarkStack()
{
    ASSERT(m_stack.isEmpty());
}

inline void MarkStack::addOpaqueRoot(void* root)
{
#if ENABLE(PARALLEL_GC)
    if (Options::numberOfGCMarkers == 1) {
        // Put directly into the shared HashSet.
        m_shared.m_opaqueRoots.add(root);
        return;
    }
    // Put into the local set, but merge with the shared one every once in
    // a while to make sure that the local sets don't grow too large.
    mergeOpaqueRootsIfProfitable();
    m_opaqueRoots.add(root);
#else
    m_opaqueRoots.add(root);
#endif
}

inline bool MarkStack::containsOpaqueRoot(void* root)
{
    ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty());
    return m_shared.m_opaqueRoots.contains(root);
#else
    return m_opaqueRoots.contains(root);
#endif
}

inline int MarkStack::opaqueRootCount()
{
    ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty());
    return m_shared.m_opaqueRoots.size();
#else
    return m_opaqueRoots.size();
#endif
}

inline void MarkStackArray::append(const JSCell* cell)
{
    if (m_top == m_segmentCapacity)
        expand();
    m_topSegment->data()[postIncTop()] = cell;
}

inline bool MarkStackArray::canRemoveLast()
{
    return !!m_top;
}

inline const JSCell* MarkStackArray::removeLast()
{
    return m_topSegment->data()[preDecTop()];
}

inline bool MarkStackArray::isEmpty()
{
    if (m_top)
        return false;
    if (m_topSegment->m_previous) {
        ASSERT(m_topSegment->m_previous->m_top == m_segmentCapacity);
        return false;
    }
    return true;
}

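// canDonateSomeCells() only reads local state, so it serves as a cheap
// pre-check before attempting donation. A typical caller pattern
// (hypothetical; the real drain loop lives in SlotVisitor) is:
//
//     if (localStack.canDonateSomeCells())
//         localStack.donateSomeCellsTo(sharedStack);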
inline bool MarkStackArray::canDonateSomeCells()
{
    size_t numberOfCellsToKeep = Options::minimumNumberOfCellsToKeep;
    // See if we have enough cells to warrant donation.
    if (m_top <= numberOfCellsToKeep) {
        // The top segment alone doesn't have enough cells to donate; only
        // donate if there is at least one full segment behind it.
        if (!m_topSegment->m_previous)
            return false;

        ASSERT(m_topSegment->m_previous->m_top == m_segmentCapacity);
    }

    return true;
}

inline size_t MarkStackArray::size()
{
    return m_top + m_segmentCapacity * m_numberOfPreviousSegments;
}

ALWAYS_INLINE void MarkStack::append(JSValue* slot, size_t count)
{
    for (size_t i = 0; i < count; ++i) {
        JSValue& value = slot[i];
        if (!value)
            continue;
        internalAppend(value);
    }
}

template<typename T>
inline void MarkStack::appendUnbarrieredPointer(T** slot)
{
    ASSERT(slot);
    JSCell* cell = *slot;
    if (cell)
        internalAppend(cell);
}

ALWAYS_INLINE void MarkStack::append(JSValue* slot)
{
    ASSERT(slot);
    internalAppend(*slot);
}

ALWAYS_INLINE void MarkStack::append(JSCell** slot)
{
    ASSERT(slot);
    internalAppend(*slot);
}

ALWAYS_INLINE void MarkStack::internalAppend(JSValue value)
{
    ASSERT(value);
    if (!value.isCell())
        return;
    internalAppend(value.asCell());
}

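// ParallelModeEnabler is an RAII guard that marks a MarkStack as being in
// parallel mode for the duration of a scope. A usage sketch (hypothetical):
//
//     {
//         ParallelModeEnabler enabler(visitor);
//         // ... drain cooperatively with the other marking threads ...
//     } // m_isInParallelMode is cleared here, even on early return.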
class ParallelModeEnabler {
public:
    ParallelModeEnabler(MarkStack& stack)
        : m_stack(stack)
    {
        ASSERT(!m_stack.m_isInParallelMode);
        m_stack.m_isInParallelMode = true;
    }

    ~ParallelModeEnabler()
    {
        ASSERT(m_stack.m_isInParallelMode);
        m_stack.m_isInParallelMode = false;
    }

private:
    MarkStack& m_stack;
};

} // namespace JSC

#endif // MarkStack_h