/*
 *  Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 *  Copyright (C) 2001 Peter Kelly (pmk@post.com)
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#ifndef Heap_h
#define Heap_h

#include "BlockAllocator.h"
#include "CopyVisitor.h"
#include "DFGCodeBlocks.h"
#include "GCThreadSharedData.h"
#include "HandleSet.h"
#include "HandleStack.h"
#include "JITStubRoutineSet.h"
#include "MarkedAllocator.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include "MarkedSpace.h"
#include "Options.h"
#include "SlotVisitor.h"
#include "WeakHandleOwner.h"
#include "WriteBarrierSupport.h"
#include <wtf/HashCountedSet.h>
#include <wtf/HashSet.h>

#define COLLECT_ON_EVERY_ALLOCATION 0

namespace JSC {

class CopiedSpace;
class CodeBlock;
class ExecutableBase;
class GCActivityCallback;
class GCAwareJITStubRoutine;
class GlobalCodeBlock;
class Heap;
class HeapRootVisitor;
class IncrementalSweeper;
class JITStubRoutine;
class JSCell;
class VM;
class JSStack;
class JSValue;
class LiveObjectIterator;
class LLIntOffsetsExtractor;
class MarkedArgumentBuffer;
class WeakGCHandlePool;
class SlotVisitor;

typedef std::pair<JSValue, WTF::String> ValueStringPair;
typedef HashCountedSet<JSCell*> ProtectCountSet;
typedef HashCountedSet<const char*> TypeCountSet;

enum OperationInProgress { NoOperation, Allocation, Collection };

enum HeapType { SmallHeap, LargeHeap };

class Heap {
    WTF_MAKE_NONCOPYABLE(Heap);
public:
    friend class JIT;
    friend class GCThreadSharedData;
    static Heap* heap(const JSValue); // 0 for immediate values
    static Heap* heap(const JSCell*);

    // This constant determines how many blocks we iterate between checks of our
    // deadline when calling Heap::isPagedOut. Decreasing it will cause us to detect
    // overstepping our deadline more quickly, while increasing it will cause
    // our scan to run faster.
    static const unsigned s_timeCheckResolution = 16;
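
    // A sketch of the scan pattern this constant enables (illustrative only;
    // the real loop lives in the spaces' isPagedOut implementations, and the
    // names below are assumptions):
    //
    //     unsigned blocksScanned = 0;
    //     for (/* each block */) {
    //         if (!(++blocksScanned % Heap::s_timeCheckResolution)
    //             && WTF::currentTime() > deadline)
    //             return true; // Overstepped the deadline; treat the heap as paged out.
    //         // ... touch the block's memory ...
    //     }
    //     return false;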

    static bool isLive(const void*);
    static bool isMarked(const void*);
    static bool testAndSetMarked(const void*);
    static void setMarked(const void*);

    static bool isWriteBarrierEnabled();
    static void writeBarrier(const JSCell*, JSValue);
    static void writeBarrier(const JSCell*, JSCell*);
    static uint8_t* addressOfCardFor(JSCell*);

    Heap(VM*, HeapType);
    ~Heap();
    JS_EXPORT_PRIVATE void lastChanceToFinalize();

    VM* vm() const { return m_vm; }
    MarkedSpace& objectSpace() { return m_objectSpace; }
    MachineThreads& machineThreads() { return m_machineThreads; }

    JS_EXPORT_PRIVATE GCActivityCallback* activityCallback();
    JS_EXPORT_PRIVATE void setActivityCallback(PassOwnPtr<GCActivityCallback>);
    JS_EXPORT_PRIVATE void setGarbageCollectionTimerEnabled(bool);

    JS_EXPORT_PRIVATE IncrementalSweeper* sweeper();
#if PLATFORM(IOS)
    JS_EXPORT_PRIVATE void setIncrementalSweeper(PassOwnPtr<IncrementalSweeper>);
#endif // PLATFORM(IOS)

    // Returns true if an allocation or collection is in progress.
    inline bool isBusy();

    MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); }
    MarkedAllocator& allocatorForObjectWithNormalDestructor(size_t bytes) { return m_objectSpace.normalDestructorAllocatorFor(bytes); }
    MarkedAllocator& allocatorForObjectWithImmortalStructureDestructor(size_t bytes) { return m_objectSpace.immortalStructureDestructorAllocatorFor(bytes); }
    CopiedAllocator& storageAllocator() { return m_storageSpace.allocator(); }
    CheckedBoolean tryAllocateStorage(size_t, void**);
    CheckedBoolean tryReallocateStorage(void**, size_t, size_t);

    typedef void (*Finalizer)(JSCell*);
    JS_EXPORT_PRIVATE void addFinalizer(JSCell*, Finalizer);
    void addCompiledCode(ExecutableBase*);
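
    // A minimal sketch of registering a finalizer; the cell type and callback
    // below are hypothetical, for illustration only:
    //
    //     static void finalizeMyCell(JSCell* cell)
    //     {
    //         static_cast<MyCell*>(cell)->releaseExternalResource();
    //     }
    //
    //     heap.addFinalizer(myCell, finalizeMyCell); // Runs when myCell dies.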

    void notifyIsSafeToCollect() { m_isSafeToCollect = true; }
    bool isSafeToCollect() const { return m_isSafeToCollect; }

    JS_EXPORT_PRIVATE void collectAllGarbage();
    enum SweepToggle { DoNotSweep, DoSweep };
    bool shouldCollect();
    void collect(SweepToggle);

    void reportExtraMemoryCost(size_t cost);
    JS_EXPORT_PRIVATE void reportAbandonedObjectGraph();

    JS_EXPORT_PRIVATE void protect(JSValue);
    JS_EXPORT_PRIVATE bool unprotect(JSValue); // True when the protect count drops to 0.
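
    // A minimal usage sketch: pinning a value across work that may allocate,
    // and therefore may trigger a collection (illustrative only):
    //
    //     heap.protect(value);
    //     // ... allocate objects, run script, etc.; 'value' stays alive ...
    //     heap.unprotect(value); // Returns true when the protect count hits 0.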

    void jettisonDFGCodeBlock(PassOwnPtr<CodeBlock>);

    JS_EXPORT_PRIVATE size_t size();
    JS_EXPORT_PRIVATE size_t capacity();
    JS_EXPORT_PRIVATE size_t objectCount();
    JS_EXPORT_PRIVATE size_t globalObjectCount();
    JS_EXPORT_PRIVATE size_t protectedObjectCount();
    JS_EXPORT_PRIVATE size_t protectedGlobalObjectCount();
    JS_EXPORT_PRIVATE PassOwnPtr<TypeCountSet> protectedObjectTypeCounts();
    JS_EXPORT_PRIVATE PassOwnPtr<TypeCountSet> objectTypeCounts();
    void showStatistics();

    void pushTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>*);
    void popTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>*);

    HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = adoptPtr(new HashSet<MarkedArgumentBuffer*>); return *m_markListSet; }

    template<typename Functor> typename Functor::ReturnType forEachProtectedCell(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachProtectedCell();

    HandleSet* handleSet() { return &m_handleSet; }
    HandleStack* handleStack() { return &m_handleStack; }

    void canonicalizeCellLivenessData();
    void getConservativeRegisterRoots(HashSet<JSCell*>& roots);

    double lastGCLength() { return m_lastGCLength; }
    void increaseLastGCLength(double amount) { m_lastGCLength += amount; }

    JS_EXPORT_PRIVATE void deleteAllCompiledCode();

    void didAllocate(size_t);
    void didAbandon(size_t);

    bool isPagedOut(double deadline);

    const JITStubRoutineSet& jitStubRoutines() { return m_jitStubRoutines; }

private:
    friend class CodeBlock;
    friend class CopiedBlock;
    friend class GCAwareJITStubRoutine;
    friend class HandleSet;
    friend class JITStubRoutine;
    friend class LLIntOffsetsExtractor;
    friend class MarkedSpace;
    friend class MarkedAllocator;
    friend class MarkedBlock;
    friend class CopiedSpace;
    friend class CopyVisitor;
    friend class SlotVisitor;
    friend class SuperRegion;
    friend class IncrementalSweeper;
    friend class HeapStatistics;
    friend class WeakSet;
    template<typename T> friend void* allocateCell(Heap&);
    template<typename T> friend void* allocateCell(Heap&, size_t);

    void* allocateWithImmortalStructureDestructor(size_t); // For use with special objects whose Structures never die.
    void* allocateWithNormalDestructor(size_t); // For use with objects that inherit directly or indirectly from JSDestructibleObject.
    void* allocateWithoutDestructor(size_t); // For use with objects without destructors.

    static const size_t minExtraCost = 256;
    static const size_t maxExtraCost = 1024 * 1024;

    class FinalizerOwner : public WeakHandleOwner {
        virtual void finalize(Handle<Unknown>, void* context);
    };

    JS_EXPORT_PRIVATE bool isValidAllocation(size_t);
    JS_EXPORT_PRIVATE void reportExtraMemoryCostSlowCase(size_t);

    void markRoots();
    void markProtectedObjects(HeapRootVisitor&);
    void markTempSortVectors(HeapRootVisitor&);
    void copyBackingStores();
    void harvestWeakReferences();
    void finalizeUnconditionalFinalizers();
    void deleteUnmarkedCompiledCode();
    void zombifyDeadObjects();
    void markDeadObjects();

    JSStack& stack();
    BlockAllocator& blockAllocator();

    const HeapType m_heapType;
    const size_t m_ramSize;
    const size_t m_minBytesPerCycle;
    size_t m_sizeAfterLastCollect;

    size_t m_bytesAllocatedLimit;
    size_t m_bytesAllocated;
    size_t m_bytesAbandoned;

    OperationInProgress m_operationInProgress;
    BlockAllocator m_blockAllocator;
    MarkedSpace m_objectSpace;
    CopiedSpace m_storageSpace;

#if ENABLE(SIMPLE_HEAP_PROFILING)
    VTableSpectrum m_destroyedTypeCounts;
#endif

    ProtectCountSet m_protectedValues;
    Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>* > m_tempSortingVectors;
    OwnPtr<HashSet<MarkedArgumentBuffer*> > m_markListSet;

    MachineThreads m_machineThreads;

    GCThreadSharedData m_sharedData;
    SlotVisitor m_slotVisitor;
    CopyVisitor m_copyVisitor;

    HandleSet m_handleSet;
    HandleStack m_handleStack;
    DFGCodeBlocks m_dfgCodeBlocks;
    JITStubRoutineSet m_jitStubRoutines;
    FinalizerOwner m_finalizerOwner;

    bool m_isSafeToCollect;

    VM* m_vm;
    double m_lastGCLength;
    double m_lastCodeDiscardTime;

    DoublyLinkedList<ExecutableBase> m_compiledCode;

    OwnPtr<GCActivityCallback> m_activityCallback;
    OwnPtr<IncrementalSweeper> m_sweeper;
    Vector<MarkedBlock*> m_blockSnapshot;
};

struct MarkedBlockSnapshotFunctor : public MarkedBlock::VoidFunctor {
    MarkedBlockSnapshotFunctor(Vector<MarkedBlock*>& blocks)
        : m_index(0)
        , m_blocks(blocks)
    {
    }

    void operator()(MarkedBlock* block) { m_blocks[m_index++] = block; }

    size_t m_index;
    Vector<MarkedBlock*>& m_blocks;
};
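
// A sketch of how this functor can be applied, mirroring the block snapshot
// the collector takes (the exact call site is in Heap.cpp, so treat the
// details below as an assumption):
//
//     m_blockSnapshot.resize(m_objectSpace.blocks().set().size());
//     MarkedBlockSnapshotFunctor functor(m_blockSnapshot);
//     m_objectSpace.forEachBlock(functor);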

inline bool Heap::shouldCollect()
{
    if (Options::gcMaxHeapSize())
        return m_bytesAllocated > Options::gcMaxHeapSize() && m_isSafeToCollect && m_operationInProgress == NoOperation;
    return m_bytesAllocated > m_bytesAllocatedLimit && m_isSafeToCollect && m_operationInProgress == NoOperation;
}
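
// A typical (illustrative) use from an allocation slow path, which consults
// the policy above before forcing a collection:
//
//     if (heap->shouldCollect())
//         heap->collect(Heap::DoNotSweep);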

bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline Heap* Heap::heap(const JSCell* cell)
{
    return MarkedBlock::blockFor(cell)->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return 0;
    return heap(v.asCell());
}

inline bool Heap::isLive(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isLiveCell(cell);
}

inline bool Heap::isMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isMarked(cell);
}

inline bool Heap::testAndSetMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
}

inline void Heap::setMarked(const void* cell)
{
    MarkedBlock::blockFor(cell)->setMarked(cell);
}

inline bool Heap::isWriteBarrierEnabled()
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    return true;
#else
    return false;
#endif
}

inline void Heap::writeBarrier(const JSCell*, JSCell*)
{
    WriteBarrierCounters::countWriteBarrier();
}

inline void Heap::writeBarrier(const JSCell*, JSValue)
{
    WriteBarrierCounters::countWriteBarrier();
}
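
// Illustrative call site: a barrier accompanies storing one cell pointer into
// another cell. This sketch only approximates what WriteBarrier<T>::set does;
// the member and parameter names are assumptions:
//
//     void setField(const JSCell* owner, JSCell* newValue)
//     {
//         Heap::writeBarrier(owner, newValue); // Just a counter unless profiling is enabled.
//         m_field = newValue;
//     }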

inline void Heap::reportExtraMemoryCost(size_t cost)
{
    if (cost > minExtraCost)
        reportExtraMemoryCostSlowCase(cost);
}
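
// A minimal sketch of reporting out-of-line memory so the GC accounts for
// bytes it cannot see directly (the buffer and its size are hypothetical):
//
//     void* buffer = fastMalloc(bufferSize);
//     heap.reportExtraMemoryCost(bufferSize); // May make a collection happen sooner.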

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell(Functor& functor)
{
    ProtectCountSet::iterator end = m_protectedValues.end();
    for (ProtectCountSet::iterator it = m_protectedValues.begin(); it != end; ++it)
        functor(it->key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);

    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell()
{
    Functor functor;
    return forEachProtectedCell(functor);
}
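
// A minimal functor sketch matching the protocol these templates expect: a
// ReturnType typedef, operator(), and returnValue(). The counting functor
// itself is hypothetical:
//
//     struct CountProtectedCells {
//         typedef size_t ReturnType;
//         CountProtectedCells() : m_count(0) { }
//         void operator()(JSCell*) { ++m_count; }
//         ReturnType returnValue() { return m_count; }
//         size_t m_count;
//     };
//
//     size_t count = heap.forEachProtectedCell<CountProtectedCells>();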

inline void* Heap::allocateWithNormalDestructor(size_t bytes)
{
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithNormalDestructor(bytes);
}

inline void* Heap::allocateWithImmortalStructureDestructor(size_t bytes)
{
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithImmortalStructureDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

inline CheckedBoolean Heap::tryAllocateStorage(size_t bytes, void** outPtr)
{
    return m_storageSpace.tryAllocate(bytes, outPtr);
}

inline CheckedBoolean Heap::tryReallocateStorage(void** ptr, size_t oldSize, size_t newSize)
{
    return m_storageSpace.tryReallocate(ptr, oldSize, newSize);
}
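
// A minimal usage sketch for the checked storage allocators; the returned
// CheckedBoolean must be tested, and the failure handling here is
// illustrative only:
//
//     void* backingStore = 0;
//     if (!heap.tryAllocateStorage(byteSize, &backingStore))
//         CRASH(); // Or propagate the failure to the caller.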

inline BlockAllocator& Heap::blockAllocator()
{
    return m_blockAllocator;
}

} // namespace JSC

#endif // Heap_h