/*
 * Copyright (C) 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef HeapInlines_h
#define HeapInlines_h

#include "Heap.h"
#include "JSCell.h"
#include "Structure.h"
#include <type_traits>
#include <wtf/Assertions.h>

namespace JSC {

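// A collection is considered only when GC is not deferred, the bytes allocated
// this cycle have crossed the active threshold (the Options::gcMaxHeapSize()
// override if set, otherwise the computed Eden size), the heap is marked safe
// to collect, and no other heap operation is already in progress.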
inline bool Heap::shouldCollect()
{
    if (isDeferred())
        return false;
    if (Options::gcMaxHeapSize())
        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
    return m_bytesAllocatedThisCycle > m_maxEdenSize && m_isSafeToCollect && m_operationInProgress == NoOperation;
}

inline bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline bool Heap::isCollecting()
{
    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
}

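// MarkedBlock::blockFor() recovers the block that contains a cell by masking
// the cell pointer down to the block's alignment, so the ownership, liveness,
// and mark-bit queries below are constant-time pointer arithmetic plus a
// lookup in the containing block.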
inline Heap* Heap::heap(const JSCell* cell)
{
    return MarkedBlock::blockFor(cell)->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return 0;
    return heap(v.asCell());
}

inline bool Heap::isLive(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isLiveCell(cell);
}

inline bool Heap::isRemembered(const void* ptr)
{
    const JSCell* cell = static_cast<const JSCell*>(ptr);
    ASSERT(cell);
    ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
    return cell->isRemembered();
}

inline bool Heap::isMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isMarked(cell);
}

inline bool Heap::testAndSetMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
}

inline void Heap::setMarked(const void* cell)
{
    MarkedBlock::blockFor(cell)->setMarked(cell);
}

inline bool Heap::isWriteBarrierEnabled()
{
#if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
    return true;
#else
    return false;
#endif
}

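// Generational (GGC) write barriers: when a cell that has already been marked
// (i.e. it survived a collection) is made to point at an unmarked cell, the
// older cell is added to the remembered set so that the next Eden collection
// rescans it. With GGC disabled these barriers compile down to no-ops, apart
// from the optional WRITE_BARRIER_PROFILING counters.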
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
#if ENABLE(GGC)
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
#else
    UNUSED_PARAM(from);
    UNUSED_PARAM(to);
#endif
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
#if ENABLE(GGC)
    if (!from || !from->isMarked()) {
        ASSERT(!from || !isMarked(from));
        return;
    }
    if (!to || to->isMarked()) {
        ASSERT(!to || isMarked(to));
        return;
    }
    addToRememberedSet(from);
#else
    UNUSED_PARAM(from);
    UNUSED_PARAM(to);
#endif
}

inline void Heap::writeBarrier(const JSCell* from)
{
#if ENABLE(GGC)
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from || !from->isMarked()) {
        ASSERT(!from || !isMarked(from));
        return;
    }
    ASSERT(isMarked(from));
    addToRememberedSet(from);
#else
    UNUSED_PARAM(from);
#endif
}

inline void Heap::reportExtraMemoryAllocated(size_t size)
{
    if (size > minExtraMemory)
        reportExtraMemoryAllocatedSlowCase(size);
}

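// m_extraMemorySize can be updated by more than one visiting thread at once,
// so on platforms with compare-and-swap the counter is bumped with a CAS retry
// loop; otherwise a plain addition is used. Extra memory that was already
// reported for a remembered owner is skipped during Eden collections to avoid
// double-counting across collections.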
inline void Heap::reportExtraMemoryVisited(JSCell* owner, size_t size)
{
#if ENABLE(GGC)
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && Heap::isRemembered(owner))
        return;
#else
    UNUSED_PARAM(owner);
#endif

    size_t* counter = &m_extraMemorySize;

#if ENABLE(COMPARE_AND_SWAP)
    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwapSize(counter, oldSize, oldSize + size))
            return;
    }
#else
    (*counter) += size;
#endif
}

inline void Heap::deprecatedReportExtraMemory(size_t size)
{
    if (size > minExtraMemory)
        deprecatedReportExtraMemorySlowCase(size);
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell(Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);

    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell()
{
    Functor functor;
    return forEachProtectedCell(functor);
}

template<typename Functor> inline void Heap::forEachCodeBlock(Functor& functor)
{
    return m_codeBlocks.iterate<Functor>(functor);
}

inline void* Heap::allocateWithDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

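// The three templates below pick the destructor or non-destructor allocation
// path based on ClassType::needsDestruction. The shared ASSERT encodes the
// invariant that a destructible, non-immortal class must derive from
// JSDestructibleObject, because JSCell::classInfo() relies on that layout.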
template<typename ClassType>
void* Heap::allocateObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocateWithDestructor(bytes);
    return allocateWithoutDestructor(bytes);
}

template<typename ClassType>
MarkedSpace::Subspace& Heap::subspaceForObjectOfType()
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return subspaceForObjectDestructor();
    return subspaceForObjectWithoutDestructor();
}

template<typename ClassType>
MarkedAllocator& Heap::allocatorForObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocatorForObjectWithDestructor(bytes);
    return allocatorForObjectWithoutDestructor(bytes);
}

inline CheckedBoolean Heap::tryAllocateStorage(JSCell* intendedOwner, size_t bytes, void** outPtr)
{
    CheckedBoolean result = m_storageSpace.tryAllocate(bytes, outPtr);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of storage for %p: %p.\n", bytes, intendedOwner, *outPtr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline CheckedBoolean Heap::tryReallocateStorage(JSCell* intendedOwner, void** ptr, size_t oldSize, size_t newSize)
{
#if ENABLE(ALLOCATION_LOGGING)
    void* oldPtr = *ptr;
#endif
    CheckedBoolean result = m_storageSpace.tryReallocate(ptr, oldSize, newSize);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC reallocating %lu -> %lu bytes of storage for %p: %p -> %p.\n", oldSize, newSize, intendedOwner, oldPtr, *ptr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline void Heap::ascribeOwner(JSCell* intendedOwner, void* storage)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner, storage);
#else
    UNUSED_PARAM(intendedOwner);
    UNUSED_PARAM(storage);
#endif
}

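// On Core Foundation platforms, objects handed to releaseSoon() keep their
// retain and are queued on the heap so the heap can release them later, rather
// than being released immediately on the calling thread.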
#if USE(CF)
template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_delayedReleaseObjects.append(WTF::move(object));
}
#endif

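// GC deferral is a simple nesting counter: callers (typically the DeferGC
// family of RAII helpers declared elsewhere in the tree) bump m_deferralDepth
// around regions where a collection must not start, and pair each increment
// with decrementDeferralDepthAndGCIfNeeded() so that a pending collection runs
// as soon as the last deferral is released.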
inline void Heap::incrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth < 100); // Sanity check to make sure this doesn't get ridiculous.
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth >= 1);
    m_deferralDepth--;
}

inline bool Heap::collectIfNecessaryOrDefer()
{
    if (isDeferred())
        return false;

    if (!shouldCollect())
        return false;

    collect();
    return true;
}

inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    decrementDeferralDepth();
    collectIfNecessaryOrDefer();
}

inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    if (!m_markListSet)
        m_markListSet = std::make_unique<HashSet<MarkedArgumentBuffer*>>();
    return *m_markListSet;
}

inline void Heap::registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback)
{
    m_weakGCMaps.add(weakGCMap, WTF::move(pruningCallback));
}

inline void Heap::unregisterWeakGCMap(void* weakGCMap)
{
    m_weakGCMaps.remove(weakGCMap);
}

} // namespace JSC

#endif // HeapInlines_h