/*
 * Copyright (C) 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef HeapInlines_h
#define HeapInlines_h

#include "Heap.h"
#include "JSCell.h"
#include "Structure.h"

namespace JSC {

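// Fast-path collection trigger used by allocation slow paths: never collect
// while GC is deferred or another heap operation is in flight; otherwise
// compare this cycle's allocation against the Options::gcMaxHeapSize()
// override when it is set, or against the computed eden limit.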
inline bool Heap::shouldCollect()
{
    if (isDeferred())
        return false;
    if (Options::gcMaxHeapSize())
        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
    return m_bytesAllocatedThisCycle > m_maxEdenSize && m_isSafeToCollect && m_operationInProgress == NoOperation;
}

inline bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline bool Heap::isCollecting()
{
    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
}

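// The per-cell queries below delegate to the MarkedBlock that contains the
// cell; MarkedBlock::blockFor() recovers the block from the cell pointer
// itself (blocks are aligned, so this is just address masking).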
inline Heap* Heap::heap(const JSCell* cell)
{
    return MarkedBlock::blockFor(cell)->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return 0;
    return heap(v.asCell());
}

inline bool Heap::isLive(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isLiveCell(cell);
}

inline bool Heap::isRemembered(const void* ptr)
{
    const JSCell* cell = static_cast<const JSCell*>(ptr);
    ASSERT(cell);
    ASSERT(!Options::enableConcurrentJIT() || !isCompilationThread());
    ASSERT(MarkedBlock::blockFor(cell)->isRemembered(cell) == cell->isRemembered());
    return cell->isRemembered();
}

inline bool Heap::isMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isMarked(cell);
}

inline bool Heap::testAndSetMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
}

inline void Heap::setMarked(const void* cell)
{
    MarkedBlock::blockFor(cell)->setMarked(cell);
}

inline bool Heap::isWriteBarrierEnabled()
{
#if ENABLE(WRITE_BARRIER_PROFILING) || ENABLE(GGC)
    return true;
#else
    return false;
#endif
}

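// Generational write barriers. With GGC enabled, storing a reference to a
// new (unmarked) cell into an old (marked) cell must record the old cell in
// the remembered set so an eden collection revisits it. The overloads below
// filter out the cases where nothing needs recording (non-cell values,
// unmarked sources, already-marked targets) before taking the
// addToRememberedSet() slow path. Callers normally reach these through
// helpers such as WriteBarrier<T>::set() rather than invoking them directly.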
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
#if ENABLE(GGC)
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
#else
    UNUSED_PARAM(from);
    UNUSED_PARAM(to);
#endif
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
#if ENABLE(GGC)
    if (!from || !from->isMarked()) {
        ASSERT(!from || !isMarked(from));
        return;
    }
    if (!to || to->isMarked()) {
        ASSERT(!to || isMarked(to));
        return;
    }
    addToRememberedSet(from);
#else
    UNUSED_PARAM(from);
    UNUSED_PARAM(to);
#endif
}

inline void Heap::writeBarrier(const JSCell* from)
{
#if ENABLE(GGC)
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from || !from->isMarked()) {
        ASSERT(!from || !isMarked(from));
        return;
    }
    ASSERT(isMarked(from));
    addToRememberedSet(from);
#else
    UNUSED_PARAM(from);
#endif
}

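// Out-of-line memory owned by a cell (e.g. a large backing buffer) is not
// visible to the allocator's byte counts, so owners report it here; only
// costs above minExtraCost are worth the slow path that accounts them and
// may schedule a collection.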
inline void Heap::reportExtraMemoryCost(size_t cost)
{
    if (cost > minExtraCost)
        reportExtraMemoryCostSlowCase(cost);
}

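// Functor protocol: the functor is applied to every explicitly protected
// value and every strong handle, and must provide operator()(JSCell*) plus
// ReturnType/returnValue() so the traversal can report a result.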
template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell(Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);

    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell()
{
    Functor functor;
    return forEachProtectedCell(functor);
}

template<typename Functor> inline void Heap::forEachCodeBlock(Functor& functor)
{
    return m_codeBlocks.iterate<Functor>(functor);
}

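// Cell allocation entry points. MarkedSpace (m_objectSpace) segregates cells
// by destruction behavior, so there is one allocator per flavor: cells with a
// normal destructor, cells with a destructor whose Structure is immortal (so
// it can still be consulted when the cell is swept), and trivially
// destructible cells.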
inline void* Heap::allocateWithNormalDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithNormalDestructor(bytes);
}

inline void* Heap::allocateWithImmortalStructureDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with immortal structure destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithImmortalStructureDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

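// Backing-store allocation. m_storageSpace hands out out-of-line storage
// (e.g. butterflies and property storage) on behalf of cells; allocation can
// fail, hence the CheckedBoolean results. intendedOwner exists purely for
// ENABLE(ALLOCATION_LOGGING) builds.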
inline CheckedBoolean Heap::tryAllocateStorage(JSCell* intendedOwner, size_t bytes, void** outPtr)
{
    CheckedBoolean result = m_storageSpace.tryAllocate(bytes, outPtr);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of storage for %p: %p.\n", bytes, intendedOwner, *outPtr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline CheckedBoolean Heap::tryReallocateStorage(JSCell* intendedOwner, void** ptr, size_t oldSize, size_t newSize)
{
#if ENABLE(ALLOCATION_LOGGING)
    void* oldPtr = *ptr;
#endif
    CheckedBoolean result = m_storageSpace.tryReallocate(ptr, oldSize, newSize);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC reallocating %lu -> %lu bytes of storage for %p: %p -> %p.\n", oldSize, newSize, intendedOwner, oldPtr, *ptr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline void Heap::ascribeOwner(JSCell* intendedOwner, void* storage)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner, storage);
#else
    UNUSED_PARAM(intendedOwner);
    UNUSED_PARAM(storage);
#endif
}

inline BlockAllocator& Heap::blockAllocator()
{
    return m_blockAllocator;
}

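// On CF platforms, releaseSoon() hands a RetainPtr off to the object space so
// the underlying object is released later, away from GC-sensitive code,
// instead of at the point the last reference is dropped.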
#if USE(CF)
template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_objectSpace.releaseSoon(WTF::move(object));
}
#endif

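// GC deferral. Code that holds raw cell pointers across allocations (and so
// must not let a collection run) brackets the region by bumping
// m_deferralDepth. Rather than calling these directly, callers typically use
// the DeferGC RAII helper from DeferGC.h; a minimal sketch:
//
//     DeferGC deferGC(vm.heap);
//     // ... allocate several cells without risking an intervening GC ...
//
// collectIfNecessaryOrDefer() is the allocation-path hook: it is a no-op
// while deferred, and otherwise collects when shouldCollect() says so.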
inline void Heap::incrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth < 100); // Sanity check to make sure this doesn't get ridiculous.
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth >= 1);
    m_deferralDepth--;
}

inline bool Heap::collectIfNecessaryOrDefer()
{
    if (isDeferred())
        return false;

    if (!shouldCollect())
        return false;

    collect();
    return true;
}

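// Popping a deferral and immediately re-checking lets a collection that was
// requested inside the deferred region run as soon as it becomes legal; this
// is what an RAII deferral helper calls on scope exit.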
inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    decrementDeferralDepth();
    collectIfNecessaryOrDefer();
}

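// Lazily created registry of MarkedArgumentBuffers that currently hold cell
// pointers, so the collector can treat their contents as roots.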
inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    if (!m_markListSet)
        m_markListSet = adoptPtr(new HashSet<MarkedArgumentBuffer*>);
    return *m_markListSet;
}

} // namespace JSC

#endif // HeapInlines_h