// Provenance: heap/Heap.h from Apple JavaScriptCore
// (tarball JavaScriptCore-7600.1.4.13.1), recovered from a git-blame view.
1/*
2 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
3 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
4 * Copyright (C) 2003-2009, 2013-2014 Apple Inc. All rights reserved.
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 *
20 */
21
22#ifndef Heap_h
23#define Heap_h
24
25#include "ArrayBuffer.h"
26#include "BlockAllocator.h"
27#include "CodeBlockSet.h"
28#include "CopyVisitor.h"
29#include "GCIncomingRefCountedSet.h"
30#include "GCThreadSharedData.h"
31#include "HandleSet.h"
32#include "HandleStack.h"
33#include "HeapOperation.h"
34#include "JITStubRoutineSet.h"
35#include "MarkedAllocator.h"
36#include "MarkedBlock.h"
37#include "MarkedBlockSet.h"
38#include "MarkedSpace.h"
39#include "Options.h"
40#include "SlotVisitor.h"
41#include "StructureIDTable.h"
42#include "WeakHandleOwner.h"
43#include "WriteBarrierBuffer.h"
44#include "WriteBarrierSupport.h"
45#include <wtf/HashCountedSet.h>
46#include <wtf/HashSet.h>
47
48namespace JSC {
49
50class CopiedSpace;
51class CodeBlock;
52class ExecutableBase;
53class EdenGCActivityCallback;
54class FullGCActivityCallback;
55class GCActivityCallback;
56class GCAwareJITStubRoutine;
57class GlobalCodeBlock;
58class Heap;
59class HeapRootVisitor;
60class IncrementalSweeper;
61class JITStubRoutine;
62class JSCell;
63class VM;
64class JSStack;
65class JSValue;
66class LiveObjectIterator;
67class LLIntOffsetsExtractor;
68class MarkedArgumentBuffer;
69class WeakGCHandlePool;
70class SlotVisitor;
71
72namespace DFG {
73class Worklist;
74}
75
76static void* const zombifiedBits = reinterpret_cast<void*>(0xdeadbeef);
77
78typedef std::pair<JSValue, WTF::String> ValueStringPair;
79typedef HashCountedSet<JSCell*> ProtectCountSet;
80typedef HashCountedSet<const char*> TypeCountSet;
81
82enum HeapType { SmallHeap, LargeHeap };
83
// The garbage-collected heap for a single VM. Owns the marked (cell) space,
// the copied (backing-store) space, the handle machinery, and all GC
// bookkeeping. One Heap exists per VM (see m_vm / vm()).
//
// NOTE(review): member declaration order below is deliberately preserved —
// the JIT-related friends (e.g. LLIntOffsetsExtractor, DFG::SpeculativeJIT)
// appear to read fields directly, so field layout is presumably ABI for the
// generated code; confirm before reordering.
class Heap {
    WTF_MAKE_NONCOPYABLE(Heap);
public:
    // The compilers and the GC's shared-thread data poke at Heap internals
    // directly, hence the friendships.
    friend class JIT;
    friend class DFG::SpeculativeJIT;
    friend class GCThreadSharedData;
    // Map a value/cell back to the Heap that owns it.
    static Heap* heap(const JSValue); // 0 for immediate values
    static Heap* heap(const JSCell*);

    // This constant determines how many blocks we iterate between checks of our
    // deadline when calling Heap::isPagedOut. Decreasing it will cause us to detect
    // overstepping our deadline more quickly, while increasing it will cause
    // our scan to run faster.
    static const unsigned s_timeCheckResolution = 16;

    // Mark-bit queries/updates on an arbitrary cell pointer.
    static bool isLive(const void*);
    static bool isMarked(const void*);
    static bool testAndSetMarked(const void*);
    static void setMarked(const void*);
    static bool isRemembered(const void*);

    // Generational write-barrier support: remembered-set insertion plus the
    // barrier overloads the JITs and runtime call on stores into cells.
    JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);
    static bool isWriteBarrierEnabled();
    void writeBarrier(const JSCell*);
    void writeBarrier(const JSCell*, JSValue);
    void writeBarrier(const JSCell*, JSCell*);

    WriteBarrierBuffer& writeBarrierBuffer() { return m_writeBarrierBuffer; }
    void flushWriteBarrierBuffer(JSCell*);

    Heap(VM*, HeapType);
    ~Heap();
    // Final teardown hook invoked before the Heap dies (exported for embedders).
    JS_EXPORT_PRIVATE void lastChanceToFinalize();

    VM* vm() const { return m_vm; }
    MarkedSpace& objectSpace() { return m_objectSpace; }
    MachineThreads& machineThreads() { return m_machineThreads; }

    const SlotVisitor& slotVisitor() const { return m_slotVisitor; }

    // Timer-driven GC triggers: separate callbacks for full vs. eden (young
    // generation) collections, plus a master enable switch.
    JS_EXPORT_PRIVATE GCActivityCallback* fullActivityCallback();
    JS_EXPORT_PRIVATE GCActivityCallback* edenActivityCallback();
    JS_EXPORT_PRIVATE void setFullActivityCallback(PassRefPtr<FullGCActivityCallback>);
    JS_EXPORT_PRIVATE void setEdenActivityCallback(PassRefPtr<EdenGCActivityCallback>);
    JS_EXPORT_PRIVATE void setGarbageCollectionTimerEnabled(bool);

    // Incremental sweeping is delegated to a separately owned sweeper object.
    JS_EXPORT_PRIVATE IncrementalSweeper* sweeper();
    JS_EXPORT_PRIVATE void setIncrementalSweeper(PassOwnPtr<IncrementalSweeper>);

    // true if collection is in progress
    bool isCollecting();
    HeapOperation operationInProgress() { return m_operationInProgress; }
    // true if an allocation or collection is in progress
    bool isBusy();

    // Size-class allocators for marked cells, split by destructor kind, plus
    // the bump allocator for copied backing stores.
    MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); }
    MarkedAllocator& allocatorForObjectWithNormalDestructor(size_t bytes) { return m_objectSpace.normalDestructorAllocatorFor(bytes); }
    MarkedAllocator& allocatorForObjectWithImmortalStructureDestructor(size_t bytes) { return m_objectSpace.immortalStructureDestructorAllocatorFor(bytes); }
    CopiedAllocator& storageAllocator() { return m_storageSpace.allocator(); }
    // Copied-space (re)allocation; "intendedOwner" is the cell the storage
    // will belong to. CheckedBoolean forces callers to test the result.
    CheckedBoolean tryAllocateStorage(JSCell* intendedOwner, size_t, void**);
    CheckedBoolean tryReallocateStorage(JSCell* intendedOwner, void**, size_t, size_t);
    void ascribeOwner(JSCell* intendedOwner, void*);

    // Per-cell finalizer hook, run via the FinalizerOwner weak-handle owner.
    typedef void (*Finalizer)(JSCell*);
    JS_EXPORT_PRIVATE void addFinalizer(JSCell*, Finalizer);
    void addCompiledCode(ExecutableBase*);

    // The VM flips this once startup is far enough along that collecting
    // cannot corrupt initialization; shouldCollect()-style paths gate on it.
    void notifyIsSafeToCollect() { m_isSafeToCollect = true; }
    bool isSafeToCollect() const { return m_isSafeToCollect; }

    JS_EXPORT_PRIVATE void collectAllGarbage();
    bool shouldCollect();
    JS_EXPORT_PRIVATE void collect(HeapOperation collectionType = AnyCollection);
    bool collectIfNecessaryOrDefer(); // Returns true if it did collect.

    // Account for malloc'd memory owned by GC objects so it can pressure the
    // collection heuristics (slow path kicks in past minExtraCost).
    void reportExtraMemoryCost(size_t cost);
    JS_EXPORT_PRIVATE void reportAbandonedObjectGraph();

    // Reference-counted GC protection for values held outside the heap.
    JS_EXPORT_PRIVATE void protect(JSValue);
    JS_EXPORT_PRIVATE bool unprotect(JSValue); // True when the protect count drops to 0.

    size_t extraSize(); // extra memory usage outside of pages allocated by the heap
    JS_EXPORT_PRIVATE size_t size();
    JS_EXPORT_PRIVATE size_t capacity();
    JS_EXPORT_PRIVATE size_t objectCount();
    JS_EXPORT_PRIVATE size_t globalObjectCount();
    JS_EXPORT_PRIVATE size_t protectedObjectCount();
    JS_EXPORT_PRIVATE size_t protectedGlobalObjectCount();
    JS_EXPORT_PRIVATE PassOwnPtr<TypeCountSet> protectedObjectTypeCounts();
    JS_EXPORT_PRIVATE PassOwnPtr<TypeCountSet> objectTypeCounts();
    void showStatistics();

    // Registers a vector of (value, string) pairs as a temporary GC root while
    // a sort is in flight (visited via visitTempSortVectors).
    void pushTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>*);
    void popTempSortVector(Vector<ValueStringPair, 0, UnsafeVectorOverflow>*);

    // Lazily-created set of in-flight argument buffers (GC roots).
    HashSet<MarkedArgumentBuffer*>& markListSet();

    // Functor-based iteration over protected cells / code blocks.
    template<typename Functor> typename Functor::ReturnType forEachProtectedCell(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachProtectedCell();
    template<typename Functor> void forEachCodeBlock(Functor&);

    HandleSet* handleSet() { return &m_handleSet; }
    HandleStack* handleStack() { return &m_handleStack; }

    void willStartIterating();
    void didFinishIterating();
    void getConservativeRegisterRoots(HashSet<JSCell*>& roots);

    // Pause-time and heap-size telemetry from the most recent collections.
    double lastFullGCLength() const { return m_lastFullGCLength; }
    double lastEdenGCLength() const { return m_lastEdenGCLength; }
    void increaseLastFullGCLength(double amount) { m_lastFullGCLength += amount; }

    size_t sizeBeforeLastEdenCollection() const { return m_sizeBeforeLastEdenCollect; }
    size_t sizeAfterLastEdenCollection() const { return m_sizeAfterLastEdenCollect; }
    size_t sizeBeforeLastFullCollection() const { return m_sizeBeforeLastFullCollect; }
    size_t sizeAfterLastFullCollection() const { return m_sizeAfterLastFullCollect; }

    JS_EXPORT_PRIVATE void deleteAllCompiledCode();
    void deleteAllUnlinkedFunctionCode();

    // Allocation accounting used by the collection-trigger heuristics.
    void didAllocate(size_t);
    void didAbandon(size_t);

    bool isPagedOut(double deadline);

    const JITStubRoutineSet& jitStubRoutines() { return m_jitStubRoutines; }

    void addReference(JSCell*, ArrayBuffer*);

    // GC is deferred while a DeferGC scope is active or globally disabled.
    bool isDeferred() const { return !!m_deferralDepth || Options::disableGC(); }

    BlockAllocator& blockAllocator();
    StructureIDTable& structureIDTable() { return m_structureIDTable; }

#if USE(CF)
    // CF-only: schedule a retainable object for release off the current stack.
    template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif

    void removeCodeBlock(CodeBlock* cb) { m_codeBlocks.remove(cb); }

    // True if the cell's first word carries the zombie sentinel written when
    // dead objects are zombified (see zombifyDeadObjects).
    static bool isZombified(JSCell* cell) { return *(void**)cell == zombifiedBits; }

private:
    friend class CodeBlock;
    friend class CopiedBlock;
    friend class DeferGC;
    friend class DeferGCForAWhile;
    friend class DelayedReleaseScope;
    friend class GCAwareJITStubRoutine;
    friend class GCLogging;
    friend class HandleSet;
    friend class JITStubRoutine;
    friend class LLIntOffsetsExtractor;
    friend class MarkedSpace;
    friend class MarkedAllocator;
    friend class MarkedBlock;
    friend class CopiedSpace;
    friend class CopyVisitor;
    friend class RecursiveAllocationScope;
    friend class SlotVisitor;
    friend class SuperRegion;
    friend class IncrementalSweeper;
    friend class HeapStatistics;
    friend class VM;
    friend class WeakSet;
    // The allocateCell templates are the public entry points into the private
    // allocate* methods below.
    template<typename T> friend void* allocateCell(Heap&);
    template<typename T> friend void* allocateCell(Heap&, size_t);

    void* allocateWithImmortalStructureDestructor(size_t); // For use with special objects whose Structures never die.
    void* allocateWithNormalDestructor(size_t); // For use with objects that inherit directly or indirectly from JSDestructibleObject.
    void* allocateWithoutDestructor(size_t); // For use with objects without destructors.

    // Thresholds for reportExtraMemoryCost's slow path / clamping.
    static const size_t minExtraCost = 256;
    static const size_t maxExtraCost = 1024 * 1024;

    // Weak-handle owner that runs the Finalizer registered via addFinalizer.
    class FinalizerOwner : public WeakHandleOwner {
        virtual void finalize(Handle<Unknown>, void* context) override;
    };

    JS_EXPORT_PRIVATE bool isValidAllocation(size_t);
    JS_EXPORT_PRIVATE void reportExtraMemoryCostSlowCase(size_t);

    // --- Collection phases, roughly in the order collect() runs them. ---
    void suspendCompilerThreads();
    void willStartCollection(HeapOperation collectionType);
    void deleteOldCode(double gcStartTime);
    void flushOldStructureIDTables();
    void flushWriteBarrierBuffer();
    void stopAllocation();

    // Root marking: gather conservative roots, then visit each root category.
    void markRoots(double gcStartTime);
    void gatherStackRoots(ConservativeRoots&, void** dummy);
    void gatherJSStackRoots(ConservativeRoots&);
    void gatherScratchBufferRoots(ConservativeRoots&);
    void clearLivenessData();
    void visitExternalRememberedSet();
    void visitSmallStrings();
    void visitConservativeRoots(ConservativeRoots&);
    void visitCompilerWorklistWeakReferences();
    void removeDeadCompilerWorklistEntries();
    void visitProtectedObjects(HeapRootVisitor&);
    void visitTempSortVectors(HeapRootVisitor&);
    void visitArgumentBuffers(HeapRootVisitor&);
    void visitException(HeapRootVisitor&);
    void visitStrongHandles(HeapRootVisitor&);
    void visitHandleStack(HeapRootVisitor&);
    void traceCodeBlocksAndJITStubRoutines();
    void converge();
    void visitWeakHandles(HeapRootVisitor&);
    void clearRememberedSet(Vector<const JSCell*>&);
    void updateObjectCounts(double gcStartTime);
    void resetVisitors();

    // Post-mark work: weak reaping, sweeping, copying, and bookkeeping reset.
    void reapWeakHandles();
    void sweepArrayBuffers();
    void snapshotMarkedSpace();
    void deleteSourceProviderCaches();
    void notifyIncrementalSweeper();
    void rememberCurrentlyExecutingCodeBlocks();
    void resetAllocators();
    void copyBackingStores();
    void harvestWeakReferences();
    void finalizeUnconditionalFinalizers();
    void clearUnmarkedExecutables();
    void deleteUnmarkedCompiledCode();
    void updateAllocationLimits();
    void didFinishCollection(double gcStartTime);
    void resumeCompilerThreads();
    void zombifyDeadObjects();
    void markDeadObjects();

    bool shouldDoFullCollection(HeapOperation requestedCollectionType) const;
    size_t sizeAfterCollect();

    JSStack& stack();

    // DeferGC scope support; the last decrement may trigger a pending GC.
    void incrementDeferralDepth();
    void decrementDeferralDepth();
    void decrementDeferralDepthAndGCIfNeeded();

    // Sizing configuration and per-collection size snapshots.
    const HeapType m_heapType;
    const size_t m_ramSize;
    const size_t m_minBytesPerCycle;
    size_t m_sizeAfterLastCollect;
    size_t m_sizeAfterLastFullCollect;
    size_t m_sizeBeforeLastFullCollect;
    size_t m_sizeAfterLastEdenCollect;
    size_t m_sizeBeforeLastEdenCollect;

    // Allocation-rate accounting feeding shouldCollect()/updateAllocationLimits().
    size_t m_bytesAllocatedThisCycle;
    size_t m_bytesAbandonedSinceLastFullCollect;
    size_t m_maxEdenSize;
    size_t m_maxHeapSize;
    bool m_shouldDoFullCollection;
    size_t m_totalBytesVisited;
    size_t m_totalBytesCopied;

    // Core spaces and allocator infrastructure.
    HeapOperation m_operationInProgress;
    BlockAllocator m_blockAllocator;
    StructureIDTable m_structureIDTable;
    MarkedSpace m_objectSpace;
    CopiedSpace m_storageSpace;
    GCIncomingRefCountedSet<ArrayBuffer> m_arrayBuffers;
    size_t m_extraMemoryUsage;

    HashSet<const JSCell*> m_copyingRememberedSet;

    // External roots: protect counts, in-flight sort vectors, argument buffers.
    ProtectCountSet m_protectedValues;
    Vector<Vector<ValueStringPair, 0, UnsafeVectorOverflow>*> m_tempSortingVectors;
    OwnPtr<HashSet<MarkedArgumentBuffer*>> m_markListSet;

    MachineThreads m_machineThreads;

    // Marking/copying machinery shared with GC worker threads.
    GCThreadSharedData m_sharedData;
    SlotVisitor m_slotVisitor;
    CopyVisitor m_copyVisitor;

    HandleSet m_handleSet;
    HandleStack m_handleStack;
    CodeBlockSet m_codeBlocks;
    JITStubRoutineSet m_jitStubRoutines;
    FinalizerOwner m_finalizerOwner;

    bool m_isSafeToCollect;

    WriteBarrierBuffer m_writeBarrierBuffer;

    // Owning VM and GC timing telemetry.
    VM* m_vm;
    double m_lastFullGCLength;
    double m_lastEdenGCLength;
    double m_lastCodeDiscardTime;

    DoublyLinkedList<ExecutableBase> m_compiledCode;

    // Timer-driven collection triggers and the incremental sweeper.
    RefPtr<GCActivityCallback> m_fullActivityCallback;
    RefPtr<GCActivityCallback> m_edenActivityCallback;
    OwnPtr<IncrementalSweeper> m_sweeper;
    Vector<MarkedBlock*> m_blockSnapshot;

    unsigned m_deferralDepth;
    Vector<DFG::Worklist*> m_suspendedCompilerWorklists;
};
385
386} // namespace JSC
387
388#endif // Heap_h