]> git.saurik.com Git - apple/javascriptcore.git/blob - heap/SlotVisitorInlines.h
JavaScriptCore-7600.1.4.15.12.tar.gz
[apple/javascriptcore.git] / heap / SlotVisitorInlines.h
1 /*
2 * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
14 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
15 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
17 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
20 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
21 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
22 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
23 * THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #ifndef SlotVisitorInlines_h
27 #define SlotVisitorInlines_h
28
29 #include "CopiedBlockInlines.h"
30 #include "CopiedSpaceInlines.h"
31 #include "Options.h"
32 #include "SlotVisitor.h"
33 #include "Weak.h"
34 #include "WeakInlines.h"
35
36 namespace JSC {
37
38 ALWAYS_INLINE void SlotVisitor::append(JSValue* slot, size_t count)
39 {
40 for (size_t i = 0; i < count; ++i) {
41 JSValue& value = slot[i];
42 internalAppend(&value, value);
43 }
44 }
45
46 template<typename T>
47 inline void SlotVisitor::appendUnbarrieredPointer(T** slot)
48 {
49 ASSERT(slot);
50 JSCell* cell = *slot;
51 internalAppend(slot, cell);
52 }
53
54 template<typename T>
55 inline void SlotVisitor::appendUnbarrieredReadOnlyPointer(T* cell)
56 {
57 internalAppend(0, cell);
58 }
59
60 ALWAYS_INLINE void SlotVisitor::append(JSValue* slot)
61 {
62 ASSERT(slot);
63 internalAppend(slot, *slot);
64 }
65
66 ALWAYS_INLINE void SlotVisitor::appendUnbarrieredValue(JSValue* slot)
67 {
68 ASSERT(slot);
69 internalAppend(slot, *slot);
70 }
71
72 ALWAYS_INLINE void SlotVisitor::appendUnbarrieredReadOnlyValue(JSValue value)
73 {
74 internalAppend(0, value);
75 }
76
77 ALWAYS_INLINE void SlotVisitor::append(JSCell** slot)
78 {
79 ASSERT(slot);
80 internalAppend(slot, *slot);
81 }
82
83 template<typename T>
84 ALWAYS_INLINE void SlotVisitor::appendUnbarrieredWeak(Weak<T>* weak)
85 {
86 ASSERT(weak);
87 if (weak->get())
88 internalAppend(0, weak->get());
89 }
90
91 ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSValue value)
92 {
93 if (!value || !value.isCell())
94 return;
95 internalAppend(from, value.asCell());
96 }
97
// Core marking step: if `cell` is non-null and not yet marked, mark it,
// account for its size, and push it onto the mark stack for later tracing.
// `from` is the address of the slot the reference was found in; it is used
// only for allocation logging.
ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSCell* cell)
{
    ASSERT(!m_isCheckingForDefaultMarkViolation);
    if (!cell)
        return;
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC noticing reference from %p to %p.\n", from, cell);
#else
    UNUSED_PARAM(from);
#endif
#if ENABLE(GC_VALIDATION)
    validate(cell);
#endif
    // testAndSetMarked sets the mark bit and reports its previous state; a
    // cell that was already marked has been (or will be) visited, so bail.
    // A null structure here would indicate a corrupt cell — the ASSERT
    // catches that in debug builds before the early return hides it.
    if (Heap::testAndSetMarked(cell) || !cell->structure()) {
        ASSERT(cell->structure());
        return;
    }

    cell->setMarked();
    // Track how many bytes this visitor has marked during the current cycle.
    m_bytesVisited += MarkedBlock::blockFor(cell)->cellSize();

    unconditionallyAppend(cell);
}
121
122 ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)
123 {
124 ASSERT(Heap::isMarked(cell));
125 m_visitCount++;
126
127 // Should never attempt to mark something that is zapped.
128 ASSERT(!cell->isZapped());
129
130 m_stack.append(cell);
131 }
132
// Visit the value held in a write-barriered slot. The dereferenced slot is
// passed straight through so overload resolution picks the right
// internalAppend for the barrier's payload type.
template<typename T> inline void SlotVisitor::append(WriteBarrierBase<T>* slot)
{
    internalAppend(slot, *slot->slot());
}
137
138 template<typename Iterator> inline void SlotVisitor::append(Iterator begin, Iterator end)
139 {
140 for (auto it = begin; it != end; ++it)
141 append(&*it);
142 }
143
144 ALWAYS_INLINE void SlotVisitor::appendValues(WriteBarrierBase<Unknown>* barriers, size_t count)
145 {
146 append(barriers->slot(), count);
147 }
148
// Register a harvester to run after marking completes. Registration is
// thread-safe because multiple marking threads may add concurrently.
inline void SlotVisitor::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
{
    m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester);
}
153
// Register a finalizer to run unconditionally at the end of collection.
// Uses the thread-safe add because marking may be parallel.
inline void SlotVisitor::addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)
{
    m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer);
}
158
// Record an opaque root (a non-cell pointer that keeps weakly-referenced
// objects alive). In parallel builds each visitor keeps a local set and
// periodically merges it into the shared one to bound local growth.
inline void SlotVisitor::addOpaqueRoot(void* root)
{
#if ENABLE(PARALLEL_GC)
    if (Options::numberOfGCMarkers() == 1) {
        // Put directly into the shared HashSet.
        m_shared.m_opaqueRoots.add(root);
        return;
    }
    // Put into the local set, but merge with the shared one every once in
    // a while to make sure that the local sets don't grow too large.
    mergeOpaqueRootsIfProfitable();
    m_opaqueRoots.add(root);
#else
    m_opaqueRoots.add(root);
#endif
}
175
// Query whether `root` was recorded as an opaque root. Only valid outside
// parallel marking: in parallel builds all local sets must already have been
// merged into the shared set, so only the shared set is consulted.
inline bool SlotVisitor::containsOpaqueRoot(void* root) const
{
    ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty());
    return m_shared.m_opaqueRoots.contains(root);
#else
    return m_opaqueRoots.contains(root);
#endif
}
186
// Lock-averse variant usable during parallel marking. The local set is
// probed first without locking (it is only touched by this thread); the
// shared set is probed under its mutex. A hit in either set is definitive
// (TrueTriState), but a miss is only MixedTriState because another thread's
// unmerged local set could still contain the root.
inline TriState SlotVisitor::containsOpaqueRootTriState(void* root) const
{
    if (m_opaqueRoots.contains(root))
        return TrueTriState;
    std::lock_guard<std::mutex> lock(m_shared.m_opaqueRootsMutex);
    if (m_shared.m_opaqueRoots.contains(root))
        return TrueTriState;
    return MixedTriState;
}
196
// Number of recorded opaque roots. Like containsOpaqueRoot, only valid
// outside parallel marking, after local sets have been merged.
inline int SlotVisitor::opaqueRootCount()
{
    ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty());
    return m_shared.m_opaqueRoots.size();
#else
    return m_opaqueRoots.size();
#endif
}
207
208 inline void SlotVisitor::mergeOpaqueRootsIfNecessary()
209 {
210 if (m_opaqueRoots.isEmpty())
211 return;
212 mergeOpaqueRoots();
213 }
214
215 inline void SlotVisitor::mergeOpaqueRootsIfProfitable()
216 {
217 if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold())
218 return;
219 mergeOpaqueRoots();
220 }
221
222 inline void SlotVisitor::donate()
223 {
224 ASSERT(m_isInParallelMode);
225 if (Options::numberOfGCMarkers() == 1)
226 return;
227
228 donateKnownParallel();
229 }
230
// Offer part of our pending work to other marking threads, then process
// whatever remains on our own mark stack.
inline void SlotVisitor::donateAndDrain()
{
    donate();
    drain();
}
236
// Note that `owner` keeps `bytes` bytes at `ptr` (in copied space) alive, so
// the block holding them can be compacted/evacuated later. `token`
// identifies which backing store of the owner this payload belongs to.
inline void SlotVisitor::copyLater(JSCell* owner, CopyToken token, void* ptr, size_t bytes)
{
    ASSERT(bytes);
    CopiedBlock* block = CopiedSpace::blockFor(ptr);
    if (block->isOversize()) {
        // Oversize blocks are never compacted; pinning is all that's needed.
        m_shared.m_copiedSpace->pin(block);
        return;
    }

    ASSERT(heap()->m_storageSpace.contains(block));

    SpinLockHolder locker(&block->workListLock());
    // Report live bytes for a full collection, or when the block says this
    // owner's bytes have not yet been reported (avoids double-counting).
    if (heap()->operationInProgress() == FullCollection || block->shouldReportLiveBytes(locker, owner)) {
        m_bytesCopied += bytes;
        block->reportLiveBytes(locker, owner, token, bytes);
    }
}
254
// Add `size` bytes of extra (non-GC-heap) memory attributed to `owner` to
// the heap's global counter. Uses a CAS retry loop where available because
// multiple marking threads may report concurrently.
inline void SlotVisitor::reportExtraMemoryUsage(JSCell* owner, size_t size)
{
#if ENABLE(GGC)
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (heap()->operationInProgress() == EdenCollection && Heap::isRemembered(owner))
        return;
#else
    UNUSED_PARAM(owner);
#endif

    size_t* counter = &m_shared.m_vm->heap.m_extraMemoryUsage;

#if ENABLE(COMPARE_AND_SWAP)
    // Lock-free accumulate: retry until our increment lands on an unchanged
    // snapshot of the counter.
    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwapSize(counter, oldSize, oldSize + size))
            return;
    }
#else
    (*counter) += size;
#endif
}
277
278 inline Heap* SlotVisitor::heap() const
279 {
280 return &sharedData().m_vm->heap;
281 }
282
283 inline VM& SlotVisitor::vm()
284 {
285 return *sharedData().m_vm;
286 }
287
288 inline const VM& SlotVisitor::vm() const
289 {
290 return *sharedData().m_vm;
291 }
292
293 } // namespace JSC
294
295 #endif // SlotVisitorInlines_h
296