/*
 * Copyright (C) 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef HeapInlines_h
#define HeapInlines_h

#include "Heap.h"
#include "Structure.h"
#include <type_traits>
#include <wtf/Assertions.h>

namespace JSC {

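// shouldCollect() is the trigger check used by collectIfNecessaryOrDefer(): it fires once the bytes
// allocated this cycle exceed either the gcMaxHeapSize override (when set) or the current eden size,
// provided it is safe to collect and no GC operation is already in progress.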
inline bool Heap::shouldCollect()
{
    if (Options::gcMaxHeapSize())
        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
    return m_bytesAllocatedThisCycle > m_maxEdenSize && m_isSafeToCollect && m_operationInProgress == NoOperation;
}

inline bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline bool Heap::isCollecting()
{
    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
}

inline Heap* Heap::heap(const JSCell* cell)
{
    return MarkedBlock::blockFor(cell)->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return nullptr;
    return heap(v.asCell());
}

inline bool Heap::isLive(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isLiveCell(cell);
}

inline bool Heap::isRemembered(const void* ptr)
{
    const JSCell* cell = static_cast<const JSCell*>(ptr);
    ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
    return cell->isRemembered();
}

inline bool Heap::isMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isMarked(cell);
}

inline bool Heap::testAndSetMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
}

inline void Heap::setMarked(const void* cell)
{
    MarkedBlock::blockFor(cell)->setMarked(cell);
}

inline bool Heap::isWriteBarrierEnabled()
{
#if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
    return true;
#else
    return false;
#endif
}

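// Generational write barrier: when a pointer to a possibly-new object is stored into an already-marked
// (old) object, the old object must be added to the remembered set so that an eden collection re-scans
// it. The overloads below filter out the cases where no remembering is needed.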
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!from || !from->isMarked()) {
        ASSERT(!from || !isMarked(from));
        return;
    }
    if (!to || to->isMarked()) {
        ASSERT(!to || isMarked(to));
        return;
    }
    addToRememberedSet(from);
}

inline void Heap::writeBarrier(const JSCell* from)
{
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from || !from->isMarked()) {
        ASSERT(!from || !isMarked(from));
        return;
    }
    ASSERT(isMarked(from));
    addToRememberedSet(from);
}

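// Extra (non-GC-heap) memory owned by GC objects is reported so it can drive collection scheduling;
// only sizes above minExtraMemory take the slow path that updates the heap's counters.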
inline void Heap::reportExtraMemoryAllocated(size_t size)
{
    if (size > minExtraMemory)
        reportExtraMemoryAllocatedSlowCase(size);
}

inline void Heap::reportExtraMemoryVisited(JSCell* owner, size_t size)
{
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && Heap::isRemembered(owner))
        return;

    size_t* counter = &m_extraMemorySize;

#if ENABLE(COMPARE_AND_SWAP)
    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwapSize(counter, oldSize, oldSize + size))
            return;
    }
#else
    (*counter) += size;
#endif
}

inline void Heap::deprecatedReportExtraMemory(size_t size)
{
    if (size > minExtraMemory)
        deprecatedReportExtraMemorySlowCase(size);
}

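// Applies the functor to every explicitly protected cell: first the protected-values set, then every
// strong handle in the handle set.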
template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell(Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);
    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell()
{
    Functor functor;
    return forEachProtectedCell(functor);
}

template<typename Functor> inline void Heap::forEachCodeBlock(Functor& functor)
{
    return m_codeBlocks.iterate<Functor>(functor);
}

inline void* Heap::allocateWithDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

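// Allocation is routed by whether the class needs its destructor run when the cell dies: destructible
// classes go to the destructor subspace/allocator, everything else to the cheaper no-destructor variant.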
template<typename ClassType>
void* Heap::allocateObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocateWithDestructor(bytes);
    return allocateWithoutDestructor(bytes);
}

template<typename ClassType>
MarkedSpace::Subspace& Heap::subspaceForObjectOfType()
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return subspaceForObjectDestructor();
    return subspaceForObjectWithoutDestructor();
}

template<typename ClassType>
MarkedAllocator& Heap::allocatorForObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocatorForObjectWithDestructor(bytes);
    return allocatorForObjectWithoutDestructor(bytes);
}

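// Out-of-line backing stores (e.g. butterflies) live in m_storageSpace rather than in the marked object
// space; these helpers allocate and reallocate there, and only use the owner for allocation logging.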
inline CheckedBoolean Heap::tryAllocateStorage(JSCell* intendedOwner, size_t bytes, void** outPtr)
{
    CheckedBoolean result = m_storageSpace.tryAllocate(bytes, outPtr);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of storage for %p: %p.\n", bytes, intendedOwner, *outPtr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline CheckedBoolean Heap::tryReallocateStorage(JSCell* intendedOwner, void** ptr, size_t oldSize, size_t newSize)
{
#if ENABLE(ALLOCATION_LOGGING)
    void* oldPtr = *ptr;
#endif
    CheckedBoolean result = m_storageSpace.tryReallocate(ptr, oldSize, newSize);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC reallocating %lu -> %lu bytes of storage for %p: %p -> %p.\n", oldSize, newSize, intendedOwner, oldPtr, *ptr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline void Heap::ascribeOwner(JSCell* intendedOwner, void* storage)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner, storage);
#else
    UNUSED_PARAM(intendedOwner);
    UNUSED_PARAM(storage);
#endif
}

template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_delayedReleaseObjects.append(WTF::move(object));
}

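// GC deferral: while the deferral depth is non-zero (e.g. inside a DeferGC scope), collections
// triggered by allocation are postponed until the depth returns to zero.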
inline void Heap::incrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth < 100); // Sanity check to make sure this doesn't get ridiculous.
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth >= 1);
    m_deferralDepth--;
}

inline bool Heap::collectIfNecessaryOrDefer()
{
    if (isDeferred())
        return false;
    if (!shouldCollect())
        return false;
    collect();
    return true;
}

inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    decrementDeferralDepth();
    collectIfNecessaryOrDefer();
}

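// Lazily creates, on first use, the set used to track MarkedArgumentBuffers that must be scanned as GC roots.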
inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    if (!m_markListSet)
        m_markListSet = std::make_unique<HashSet<MarkedArgumentBuffer*>>();
    return *m_markListSet;
}

inline void Heap::registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback)
{
    m_weakGCMaps.add(weakGCMap, WTF::move(pruningCallback));
}

inline void Heap::unregisterWeakGCMap(void* weakGCMap)
{
    m_weakGCMaps.remove(weakGCMap);
}

} // namespace JSC

#endif // HeapInlines_h