2 * Copyright (C) 2014 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
14 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
15 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
17 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
19 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
20 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
21 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
22 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
23 * THE POSSIBILITY OF SUCH DAMAGE.
31 #include "Structure.h"
// Heuristic deciding whether a garbage collection should start now.
// A collection is wanted only when this cycle's allocation volume crosses a
// threshold, collection is currently safe, and no GC operation is already
// in progress.
// NOTE(review): the embedded numbering shows original lines 36-38 are missing
// from this chunk (likely an additional early-out); code left byte-identical.
35 inline bool Heap::shouldCollect()
// If an explicit heap cap was configured via Options, compare against it.
39 if (Options::gcMaxHeapSize())
40 return m_bytesAllocatedThisCycle
> Options::gcMaxHeapSize() && m_isSafeToCollect
&& m_operationInProgress
== NoOperation
;
// Otherwise use the adaptive eden-size threshold.
41 return m_bytesAllocatedThisCycle
> m_maxEdenSize
&& m_isSafeToCollect
&& m_operationInProgress
== NoOperation
;
44 inline bool Heap::isBusy()
46 return m_operationInProgress
!= NoOperation
;
49 inline bool Heap::isCollecting()
51 return m_operationInProgress
== FullCollection
|| m_operationInProgress
== EdenCollection
;
54 inline Heap
* Heap::heap(const JSCell
* cell
)
56 return MarkedBlock::blockFor(cell
)->heap();
// Maps a JSValue to its owning Heap by delegating to the JSCell* overload.
// NOTE(review): original lines 60-62 are missing from this chunk — presumably
// a guard for non-cell (immediate) values before asCell() is called; confirm
// against the upstream file. Code left byte-identical.
59 inline Heap
* Heap::heap(const JSValue v
)
63 return heap(v
.asCell());
66 inline bool Heap::isLive(const void* cell
)
68 return MarkedBlock::blockFor(cell
)->isLiveCell(cell
);
// Queries the remembered-set bit for a cell, cross-checking the per-block
// bookkeeping against the cell's own flag in debug builds.
// NOTE(review): original line 74 is missing from this chunk (possibly an
// extra assertion); code left byte-identical.
71 inline bool Heap::isRemembered(const void* ptr
)
// Callers pass an untyped pointer; it must refer to a JSCell.
73 const JSCell
* cell
= static_cast<const JSCell
*>(ptr
);
// Not safe to query from a concurrent compilation thread.
75 ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
// Block-level and cell-level remembered bits must agree.
76 ASSERT(MarkedBlock::blockFor(cell
)->isRemembered(cell
) == cell
->isRemembered());
77 return cell
->isRemembered();
80 inline bool Heap::isMarked(const void* cell
)
82 return MarkedBlock::blockFor(cell
)->isMarked(cell
);
85 inline bool Heap::testAndSetMarked(const void* cell
)
87 return MarkedBlock::blockFor(cell
)->testAndSetMarked(cell
);
90 inline void Heap::setMarked(const void* cell
)
92 MarkedBlock::blockFor(cell
)->setMarked(cell
);
// Reports whether write barriers are compiled in.
// NOTE(review): only the declaration and the opening #if are visible in this
// chunk — the branch bodies (presumably `return true;` / `return false;`) and
// the matching #else/#endif are missing; code left byte-identical.
95 inline bool Heap::isWriteBarrierEnabled()
97 #if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
// Write barrier for a store of a JSValue; forwards to the JSCell* overload.
// NOTE(review): several original lines (105, 108-111, 113+) are missing from
// this chunk — the #endif for the profiling block, and presumably a GGC
// conditional with a non-cell guard around the forwarding call; code left
// byte-identical.
104 inline void Heap::writeBarrier(const JSCell
* from
, JSValue to
)
// Optional instrumentation: counts executed write barriers.
106 #if ENABLE(WRITE_BARRIER_PROFILING)
107 WriteBarrierCounters::countWriteBarrier();
// Forward to the cell-to-cell barrier.
112 writeBarrier(from
, to
.asCell());
// Generational write barrier: when an old (marked) cell `from` is written to
// point at a new (unmarked) cell `to`, `from` must be added to the remembered
// set so the next eden collection rescans it.
// NOTE(review): multiple original lines are missing from this chunk (120,
// 123-124, 127-128, 131-132, 134+) — presumably the early `return;`
// statements inside the two guard blocks and the surrounding preprocessor
// structure; code left byte-identical.
119 inline void Heap::writeBarrier(const JSCell
* from
, JSCell
* to
)
121 #if ENABLE(WRITE_BARRIER_PROFILING)
122 WriteBarrierCounters::countWriteBarrier();
// Nothing to remember when the source is null or not yet marked (it is
// itself "new" and will be scanned anyway).
125 if (!from
|| !from
->isMarked()) {
126 ASSERT(!from
|| !isMarked(from
));
// Nothing to remember when the target is null or already marked ("old").
129 if (!to
|| to
->isMarked()) {
130 ASSERT(!to
|| isMarked(to
));
// Old-to-new store detected: remember the source cell.
133 addToRememberedSet(from
);
// Unconditional write barrier on `from`: if `from` is an old (marked) cell,
// add it to the remembered set.
// NOTE(review): several original lines are missing from this chunk (141-142,
// 146-147, 150+) — presumably an early `return;` inside the guard and the
// closing brace/preprocessor lines; code left byte-identical.
140 inline void Heap::writeBarrier(const JSCell
* from
)
// Debug-build sanity check on the cell header.
143 ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell
*>(from
));
// A null or unmarked ("new") source needs no remembering.
144 if (!from
|| !from
->isMarked()) {
145 ASSERT(!from
|| !isMarked(from
));
148 ASSERT(isMarked(from
));
149 addToRememberedSet(from
);
155 inline void Heap::reportExtraMemoryCost(size_t cost
)
157 if (cost
> minExtraCost
)
158 reportExtraMemoryCostSlowCase(cost
);
// Applies `functor` to every protected value and every strong handle, then
// returns the functor's accumulated result.
// NOTE(review): original line 164 — the body of the range-for over
// m_protectedValues (presumably the functor invocation) — is missing from
// this chunk; code left byte-identical.
161 template<typename Functor
> inline typename
Functor::ReturnType
Heap::forEachProtectedCell(Functor
& functor
)
163 for (auto& pair
: m_protectedValues
)
// Strong handles are visited with the protected-value set for filtering.
165 m_handleSet
.forEachStrongHandle(functor
, m_protectedValues
);
167 return functor
.returnValue();
// Convenience overload: default-constructs the functor and delegates to the
// reference-taking overload.
// NOTE(review): original lines 171-172 are missing from this chunk —
// presumably the opening brace and the local `Functor functor;` declaration
// that the visible return statement uses; code left byte-identical.
170 template<typename Functor
> inline typename
Functor::ReturnType
Heap::forEachProtectedCell()
173 return forEachProtectedCell(functor
);
176 template<typename Functor
> inline void Heap::forEachCodeBlock(Functor
& functor
)
178 return m_codeBlocks
.iterate
<Functor
>(functor
);
181 inline void* Heap::allocateWithNormalDestructor(size_t bytes
)
183 #if ENABLE(ALLOCATION_LOGGING)
184 dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes
);
186 ASSERT(isValidAllocation(bytes
));
187 return m_objectSpace
.allocateWithNormalDestructor(bytes
);
190 inline void* Heap::allocateWithImmortalStructureDestructor(size_t bytes
)
192 #if ENABLE(ALLOCATION_LOGGING)
193 dataLogF("JSC GC allocating %lu bytes with immortal structure destructor.\n", bytes
);
195 ASSERT(isValidAllocation(bytes
));
196 return m_objectSpace
.allocateWithImmortalStructureDestructor(bytes
);
199 inline void* Heap::allocateWithoutDestructor(size_t bytes
)
201 #if ENABLE(ALLOCATION_LOGGING)
202 dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes
);
204 ASSERT(isValidAllocation(bytes
));
205 return m_objectSpace
.allocateWithoutDestructor(bytes
);
// Tries to allocate `bytes` of butterfly/backing storage on behalf of
// `intendedOwner`, writing the result through `outPtr`.
// NOTE(review): original lines 209, 213, 215-217 are missing from this chunk
// — presumably the opening brace, the #else/#endif pairing for the logging
// block, and the `return result;`; code left byte-identical.
208 inline CheckedBoolean
Heap::tryAllocateStorage(JSCell
* intendedOwner
, size_t bytes
, void** outPtr
)
210 CheckedBoolean result
= m_storageSpace
.tryAllocate(bytes
, outPtr
);
211 #if ENABLE(ALLOCATION_LOGGING)
212 dataLogF("JSC GC allocating %lu bytes of storage for %p: %p.\n", bytes
, intendedOwner
, *outPtr
);
// When logging is off, the owner is only used for instrumentation.
214 UNUSED_PARAM(intendedOwner
);
// Tries to grow/shrink the storage at *ptr from `oldSize` to `newSize` for
// `intendedOwner`, updating *ptr in place on success.
// NOTE(review): original lines 220, 222-223, 227, 229-231 are missing from
// this chunk — presumably the opening brace, a logging-only `oldPtr` capture
// (the visible dataLogF references `oldPtr`, which is declared in a missing
// line), the #else/#endif pairing, and the `return result;`; code left
// byte-identical.
219 inline CheckedBoolean
Heap::tryReallocateStorage(JSCell
* intendedOwner
, void** ptr
, size_t oldSize
, size_t newSize
)
221 #if ENABLE(ALLOCATION_LOGGING)
224 CheckedBoolean result
= m_storageSpace
.tryReallocate(ptr
, oldSize
, newSize
);
225 #if ENABLE(ALLOCATION_LOGGING)
226 dataLogF("JSC GC reallocating %lu -> %lu bytes of storage for %p: %p -> %p.\n", oldSize
, newSize
, intendedOwner
, oldPtr
, *ptr
);
// When logging is off, the owner is only used for instrumentation.
228 UNUSED_PARAM(intendedOwner
);
// Records (for allocation logging only) that `intendedOwner` now owns the
// given storage; a no-op when logging is disabled.
// NOTE(review): original lines 234, 237, 240 are missing from this chunk —
// presumably the opening brace, an #else separating the logging call from the
// UNUSED_PARAM fallbacks, and the closing #endif/brace; code left
// byte-identical.
233 inline void Heap::ascribeOwner(JSCell
* intendedOwner
, void* storage
)
235 #if ENABLE(ALLOCATION_LOGGING)
236 dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner
, storage
)
243 inline BlockAllocator
& Heap::blockAllocator()
245 return m_blockAllocator
;
249 template <typename T
>
250 inline void Heap::releaseSoon(RetainPtr
<T
>&& object
)
252 m_objectSpace
.releaseSoon(WTF::move(object
));
// Enters a no-GC region by bumping the deferral depth.
// NOTE(review): original line 259 — presumably the actual
// `m_deferralDepth++` — is missing from this chunk; only the bound check is
// visible. Code left byte-identical.
256 inline void Heap::incrementDeferralDepth()
258 RELEASE_ASSERT(m_deferralDepth
< 100); // Sanity check to make sure this doesn't get ridiculous.
// Leaves a no-GC region.
// NOTE(review): original line 265 — presumably the actual
// `m_deferralDepth--` — is missing from this chunk; only the underflow check
// is visible. Code left byte-identical.
262 inline void Heap::decrementDeferralDepth()
// Depth must be positive before a decrement.
264 RELEASE_ASSERT(m_deferralDepth
>= 1);
// Triggers a collection when shouldCollect() says so, unless collection is
// currently deferred.
// NOTE(review): most of the original body (lines 269-272, 274+) is missing
// from this chunk — only the shouldCollect() guard is visible; the deferral
// check, early returns, and the collect call are outside this view. Code left
// byte-identical.
268 inline bool Heap::collectIfNecessaryOrDefer()
273 if (!shouldCollect())
280 inline void Heap::decrementDeferralDepthAndGCIfNeeded()
282 decrementDeferralDepth();
283 collectIfNecessaryOrDefer();
// Lazily creates and returns the set of active MarkedArgumentBuffers.
// NOTE(review): original lines 287-288 are missing from this chunk —
// presumably the opening brace and an `if (!m_markListSet)` guard, without
// which the visible assignment would leak/reset on every call; code left
// byte-identical.
286 inline HashSet
<MarkedArgumentBuffer
*>& Heap::markListSet()
289 m_markListSet
= adoptPtr(new HashSet
<MarkedArgumentBuffer
*>);
290 return *m_markListSet
;
295 #endif // HeapInlines_h