/*
 * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef SlotVisitorInlines_h
#define SlotVisitorInlines_h

#include "CopiedBlockInlines.h"
#include "CopiedSpaceInlines.h"
#include "Options.h"
#include "SlotVisitor.h"
#include "Weak.h"
#include "WeakInlines.h"

namespace JSC {

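// Inline fast paths for SlotVisitor, the marking visitor the garbage
// collector passes to visitChildren implementations. The append family of
// functions below records outgoing references; cells whose mark bit is not
// yet set are pushed onto the visitor's mark stack for later draining.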
ALWAYS_INLINE void SlotVisitor::append(JSValue* slot, size_t count)
{
    for (size_t i = 0; i < count; ++i) {
        JSValue& value = slot[i];
        internalAppend(&value, value);
    }
}

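// The "unbarriered" variants are for slots that are not wrapped in a
// WriteBarrier<>. The read-only variants pass 0 as the "from" pointer, so no
// source location is recorded for the reference.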
template<typename T>
inline void SlotVisitor::appendUnbarrieredPointer(T** slot)
{
    ASSERT(slot);
    JSCell* cell = *slot;
    internalAppend(slot, cell);
}

template<typename T>
inline void SlotVisitor::appendUnbarrieredReadOnlyPointer(T* cell)
{
    internalAppend(0, cell);
}

ALWAYS_INLINE void SlotVisitor::append(JSValue* slot)
{
    ASSERT(slot);
    internalAppend(slot, *slot);
}

ALWAYS_INLINE void SlotVisitor::appendUnbarrieredValue(JSValue* slot)
{
    ASSERT(slot);
    internalAppend(slot, *slot);
}

ALWAYS_INLINE void SlotVisitor::appendUnbarrieredReadOnlyValue(JSValue value)
{
    internalAppend(0, value);
}

ALWAYS_INLINE void SlotVisitor::append(JSCell** slot)
{
    ASSERT(slot);
    internalAppend(slot, *slot);
}

template<typename T>
ALWAYS_INLINE void SlotVisitor::appendUnbarrieredWeak(Weak<T>* weak)
{
    ASSERT(weak);
    if (weak->get())
        internalAppend(0, weak->get());
}

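// internalAppend(JSValue) filters out empty and non-cell values (ints,
// doubles, booleans, etc.), which do not live in the GC heap and so never
// need marking; only cell values are forwarded to the JSCell* overload.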
ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSValue value)
{
    if (!value || !value.isCell())
        return;
    internalAppend(from, value.asCell());
}

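// internalAppend(JSCell*) is the core of the marking fast path:
// Heap::testAndSetMarked atomically sets the cell's mark bit and reports
// whether it was already set, so each cell is pushed onto the mark stack at
// most once per collection.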
ALWAYS_INLINE void SlotVisitor::internalAppend(void* from, JSCell* cell)
{
    ASSERT(!m_isCheckingForDefaultMarkViolation);
    if (!cell)
        return;
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC noticing reference from %p to %p.\n", from, cell);
#else
    UNUSED_PARAM(from);
#endif
#if ENABLE(GC_VALIDATION)
    validate(cell);
#endif
    if (Heap::testAndSetMarked(cell) || !cell->structure()) {
        ASSERT(cell->structure());
        return;
    }

    m_bytesVisited += MarkedBlock::blockFor(cell)->cellSize();

    unconditionallyAppend(cell);
}

ALWAYS_INLINE void SlotVisitor::unconditionallyAppend(JSCell* cell)
{
    ASSERT(Heap::isMarked(cell));
    m_visitCount++;

    // Should never attempt to mark something that is zapped.
    ASSERT(!cell->isZapped());

    m_stack.append(cell);
}

template<typename T> inline void SlotVisitor::append(WriteBarrierBase<T>* slot)
{
    internalAppend(slot, *slot->slot());
}

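// Typical caller: a class's visitChildren implementation, sketched here for
// a hypothetical class Foo with a WriteBarrier<JSString> member m_string:
//
//     void Foo::visitChildren(JSCell* cell, SlotVisitor& visitor)
//     {
//         Foo* thisObject = jsCast<Foo*>(cell);
//         Base::visitChildren(thisObject, visitor);
//         visitor.append(&thisObject->m_string);
//     }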
template<typename Iterator> inline void SlotVisitor::append(Iterator begin, Iterator end)
{
    for (auto it = begin; it != end; ++it)
        append(&*it);
}

ALWAYS_INLINE void SlotVisitor::appendValues(WriteBarrierBase<Unknown>* barriers, size_t count)
{
    append(barriers->slot(), count);
}

inline void SlotVisitor::addWeakReferenceHarvester(WeakReferenceHarvester* weakReferenceHarvester)
{
    m_shared.m_weakReferenceHarvesters.addThreadSafe(weakReferenceHarvester);
}

inline void SlotVisitor::addUnconditionalFinalizer(UnconditionalFinalizer* unconditionalFinalizer)
{
    m_shared.m_unconditionalFinalizers.addThreadSafe(unconditionalFinalizer);
}

inline void SlotVisitor::addOpaqueRoot(void* root)
{
#if ENABLE(PARALLEL_GC)
    if (Options::numberOfGCMarkers() == 1) {
        // Put directly into the shared HashSet.
        m_shared.m_opaqueRoots.add(root);
    } else {
        // Put into the local set, but merge with the shared one every once in
        // a while to make sure that the local sets don't grow too large.
        mergeOpaqueRootsIfProfitable();
        m_opaqueRoots.add(root);
    }
#else
    m_opaqueRoots.add(root);
#endif
}

inline bool SlotVisitor::containsOpaqueRoot(void* root) const
{
    ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty());
    return m_shared.m_opaqueRoots.contains(root);
#else
    return m_opaqueRoots.contains(root);
#endif
}

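// The TriState variant may be called while marking is still in progress: a
// hit in either set is a definite yes, but a miss is only MixedTriState,
// because another marker thread may add the root after the shared set is
// unlocked.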
inline TriState SlotVisitor::containsOpaqueRootTriState(void* root) const
{
    if (m_opaqueRoots.contains(root))
        return TrueTriState;
    std::lock_guard<std::mutex> lock(m_shared.m_opaqueRootsMutex);
    if (m_shared.m_opaqueRoots.contains(root))
        return TrueTriState;
    return MixedTriState;
}

inline int SlotVisitor::opaqueRootCount()
{
    ASSERT(!m_isInParallelMode);
#if ENABLE(PARALLEL_GC)
    ASSERT(m_opaqueRoots.isEmpty());
    return m_shared.m_opaqueRoots.size();
#else
    return m_opaqueRoots.size();
#endif
}

inline void SlotVisitor::mergeOpaqueRootsIfNecessary()
{
    if (m_opaqueRoots.isEmpty())
        return;
    mergeOpaqueRoots();
}

inline void SlotVisitor::mergeOpaqueRootsIfProfitable()
{
    if (static_cast<unsigned>(m_opaqueRoots.size()) < Options::opaqueRootMergeThreshold())
        return;
    mergeOpaqueRoots();
}

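// Donation is how parallel markers balance load: a thread with surplus work
// moves part of its mark stack to the shared pool so idle threads can drain
// it. With a single marker there is nobody to donate to, so donate() is a
// no-op in that configuration.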
inline void SlotVisitor::donate()
{
    ASSERT(m_isInParallelMode);
    if (Options::numberOfGCMarkers() == 1)
        return;

    donateKnownParallel();
}

inline void SlotVisitor::donateAndDrain()
{
    donate();
    drain();
}

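// copyLater is the marking-phase half of the copying collector: it records
// how many bytes of a CopiedBlock are still live so the block can later be
// evacuated or evacuation can be skipped. Oversize blocks are never copied,
// so they are simply pinned in place.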
inline void SlotVisitor::copyLater(JSCell* owner, CopyToken token, void* ptr, size_t bytes)
{
    ASSERT(bytes);
    CopiedBlock* block = CopiedSpace::blockFor(ptr);
    if (block->isOversize()) {
        m_shared.m_copiedSpace->pin(block);
        return;
    }

    ASSERT(heap()->m_storageSpace.contains(block));

    SpinLockHolder locker(&block->workListLock());
    if (heap()->operationInProgress() == FullCollection || block->shouldReportLiveBytes(locker, owner)) {
        m_bytesCopied += bytes;
        block->reportLiveBytes(locker, owner, token, bytes);
    }
}

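// Extra memory (e.g. malloc'd buffers owned by cells) is added to a counter
// that feeds collection heuristics. Marker threads may race on the counter,
// so it is updated with a compare-and-swap retry loop when CAS is available.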
inline void SlotVisitor::reportExtraMemoryUsage(JSCell* owner, size_t size)
{
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (heap()->operationInProgress() == EdenCollection && Heap::isRemembered(owner))
        return;

    size_t* counter = &m_shared.m_vm->heap.m_extraMemoryUsage;

#if ENABLE(COMPARE_AND_SWAP)
    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwapSize(counter, oldSize, oldSize + size))
            return;
    }
#else
    (*counter) += size;
#endif
}

inline Heap* SlotVisitor::heap() const
{
    return &sharedData().m_vm->heap;
}

inline VM& SlotVisitor::vm()
{
    return *sharedData().m_vm;
}

inline const VM& SlotVisitor::vm() const
{
    return *sharedData().m_vm;
}

} // namespace JSC

#endif // SlotVisitorInlines_h