/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
25 #include "BlockAllocator.h"
26 #include "CopyVisitor.h"
27 #include "DFGCodeBlocks.h"
28 #include "GCThreadSharedData.h"
29 #include "HandleSet.h"
30 #include "HandleStack.h"
31 #include "JITStubRoutineSet.h"
32 #include "MarkedAllocator.h"
33 #include "MarkedBlock.h"
34 #include "MarkedBlockSet.h"
35 #include "MarkedSpace.h"
37 #include "SlotVisitor.h"
38 #include "WeakHandleOwner.h"
39 #include "WriteBarrierSupport.h"
40 #include <wtf/HashCountedSet.h>
41 #include <wtf/HashSet.h>
43 #define COLLECT_ON_EVERY_ALLOCATION 0
// Forward declarations: full definitions are not needed in this header.
class GCActivityCallback;
class GCAwareJITStubRoutine;
class GlobalCodeBlock;
class HeapRootVisitor;
class IncrementalSweeper;
class LiveObjectIterator;
class LLIntOffsetsExtractor;
class MarkedArgumentBuffer;
class WeakGCHandlePool;
67 typedef std::pair
<JSValue
, WTF::String
> ValueStringPair
;
68 typedef HashCountedSet
<JSCell
*> ProtectCountSet
;
69 typedef HashCountedSet
<const char*> TypeCountSet
;
// What the heap is currently doing; collections only start from NoOperation.
enum OperationInProgress { NoOperation, Allocation, Collection };
// Heap sizing policy selector.
enum HeapType { SmallHeap, LargeHeap };
76 WTF_MAKE_NONCOPYABLE(Heap
);
79 friend class GCThreadSharedData
;
80 static Heap
* heap(const JSValue
); // 0 for immediate values
81 static Heap
* heap(const JSCell
*);
83 // This constant determines how many blocks we iterate between checks of our
84 // deadline when calling Heap::isPagedOut. Decreasing it will cause us to detect
85 // overstepping our deadline more quickly, while increasing it will cause
86 // our scan to run faster.
87 static const unsigned s_timeCheckResolution
= 16;
89 static bool isLive(const void*);
90 static bool isMarked(const void*);
91 static bool testAndSetMarked(const void*);
92 static void setMarked(const void*);
94 static bool isWriteBarrierEnabled();
95 static void writeBarrier(const JSCell
*, JSValue
);
96 static void writeBarrier(const JSCell
*, JSCell
*);
97 static uint8_t* addressOfCardFor(JSCell
*);
101 JS_EXPORT_PRIVATE
void lastChanceToFinalize();
103 VM
* vm() const { return m_vm
; }
104 MarkedSpace
& objectSpace() { return m_objectSpace
; }
105 MachineThreads
& machineThreads() { return m_machineThreads
; }
107 JS_EXPORT_PRIVATE GCActivityCallback
* activityCallback();
108 JS_EXPORT_PRIVATE
void setActivityCallback(PassOwnPtr
<GCActivityCallback
>);
109 JS_EXPORT_PRIVATE
void setGarbageCollectionTimerEnabled(bool);
111 JS_EXPORT_PRIVATE IncrementalSweeper
* sweeper();
113 JS_EXPORT_PRIVATE
void setIncrementalSweeper(PassOwnPtr
<IncrementalSweeper
>);
114 #endif // PLATFORM(IOS)
116 // true if an allocation or collection is in progress
117 inline bool isBusy();
119 MarkedAllocator
& allocatorForObjectWithoutDestructor(size_t bytes
) { return m_objectSpace
.allocatorFor(bytes
); }
120 MarkedAllocator
& allocatorForObjectWithNormalDestructor(size_t bytes
) { return m_objectSpace
.normalDestructorAllocatorFor(bytes
); }
121 MarkedAllocator
& allocatorForObjectWithImmortalStructureDestructor(size_t bytes
) { return m_objectSpace
.immortalStructureDestructorAllocatorFor(bytes
); }
122 CopiedAllocator
& storageAllocator() { return m_storageSpace
.allocator(); }
123 CheckedBoolean
tryAllocateStorage(size_t, void**);
124 CheckedBoolean
tryReallocateStorage(void**, size_t, size_t);
126 typedef void (*Finalizer
)(JSCell
*);
127 JS_EXPORT_PRIVATE
void addFinalizer(JSCell
*, Finalizer
);
128 void addCompiledCode(ExecutableBase
*);
130 void notifyIsSafeToCollect() { m_isSafeToCollect
= true; }
131 bool isSafeToCollect() const { return m_isSafeToCollect
; }
133 JS_EXPORT_PRIVATE
void collectAllGarbage();
134 enum SweepToggle
{ DoNotSweep
, DoSweep
};
135 bool shouldCollect();
136 void collect(SweepToggle
);
138 void reportExtraMemoryCost(size_t cost
);
139 JS_EXPORT_PRIVATE
void reportAbandonedObjectGraph();
141 JS_EXPORT_PRIVATE
void protect(JSValue
);
142 JS_EXPORT_PRIVATE
bool unprotect(JSValue
); // True when the protect count drops to 0.
144 void jettisonDFGCodeBlock(PassOwnPtr
<CodeBlock
>);
146 JS_EXPORT_PRIVATE
size_t size();
147 JS_EXPORT_PRIVATE
size_t capacity();
148 JS_EXPORT_PRIVATE
size_t objectCount();
149 JS_EXPORT_PRIVATE
size_t globalObjectCount();
150 JS_EXPORT_PRIVATE
size_t protectedObjectCount();
151 JS_EXPORT_PRIVATE
size_t protectedGlobalObjectCount();
152 JS_EXPORT_PRIVATE PassOwnPtr
<TypeCountSet
> protectedObjectTypeCounts();
153 JS_EXPORT_PRIVATE PassOwnPtr
<TypeCountSet
> objectTypeCounts();
154 void showStatistics();
156 void pushTempSortVector(Vector
<ValueStringPair
, 0, UnsafeVectorOverflow
>*);
157 void popTempSortVector(Vector
<ValueStringPair
, 0, UnsafeVectorOverflow
>*);
159 HashSet
<MarkedArgumentBuffer
*>& markListSet() { if (!m_markListSet
) m_markListSet
= adoptPtr(new HashSet
<MarkedArgumentBuffer
*>); return *m_markListSet
; }
161 template<typename Functor
> typename
Functor::ReturnType
forEachProtectedCell(Functor
&);
162 template<typename Functor
> typename
Functor::ReturnType
forEachProtectedCell();
164 HandleSet
* handleSet() { return &m_handleSet
; }
165 HandleStack
* handleStack() { return &m_handleStack
; }
167 void canonicalizeCellLivenessData();
168 void getConservativeRegisterRoots(HashSet
<JSCell
*>& roots
);
170 double lastGCLength() { return m_lastGCLength
; }
171 void increaseLastGCLength(double amount
) { m_lastGCLength
+= amount
; }
173 JS_EXPORT_PRIVATE
void deleteAllCompiledCode();
175 void didAllocate(size_t);
176 void didAbandon(size_t);
178 bool isPagedOut(double deadline
);
180 const JITStubRoutineSet
& jitStubRoutines() { return m_jitStubRoutines
; }
183 friend class CodeBlock
;
184 friend class CopiedBlock
;
185 friend class GCAwareJITStubRoutine
;
186 friend class HandleSet
;
187 friend class JITStubRoutine
;
188 friend class LLIntOffsetsExtractor
;
189 friend class MarkedSpace
;
190 friend class MarkedAllocator
;
191 friend class MarkedBlock
;
192 friend class CopiedSpace
;
193 friend class CopyVisitor
;
194 friend class SlotVisitor
;
195 friend class SuperRegion
;
196 friend class IncrementalSweeper
;
197 friend class HeapStatistics
;
198 friend class WeakSet
;
199 template<typename T
> friend void* allocateCell(Heap
&);
200 template<typename T
> friend void* allocateCell(Heap
&, size_t);
202 void* allocateWithImmortalStructureDestructor(size_t); // For use with special objects whose Structures never die.
203 void* allocateWithNormalDestructor(size_t); // For use with objects that inherit directly or indirectly from JSDestructibleObject.
204 void* allocateWithoutDestructor(size_t); // For use with objects without destructors.
206 static const size_t minExtraCost
= 256;
207 static const size_t maxExtraCost
= 1024 * 1024;
209 class FinalizerOwner
: public WeakHandleOwner
{
210 virtual void finalize(Handle
<Unknown
>, void* context
);
213 JS_EXPORT_PRIVATE
bool isValidAllocation(size_t);
214 JS_EXPORT_PRIVATE
void reportExtraMemoryCostSlowCase(size_t);
217 void markProtectedObjects(HeapRootVisitor
&);
218 void markTempSortVectors(HeapRootVisitor
&);
219 void copyBackingStores();
220 void harvestWeakReferences();
221 void finalizeUnconditionalFinalizers();
222 void deleteUnmarkedCompiledCode();
223 void zombifyDeadObjects();
224 void markDeadObjects();
227 BlockAllocator
& blockAllocator();
229 const HeapType m_heapType
;
230 const size_t m_ramSize
;
231 const size_t m_minBytesPerCycle
;
232 size_t m_sizeAfterLastCollect
;
234 size_t m_bytesAllocatedLimit
;
235 size_t m_bytesAllocated
;
236 size_t m_bytesAbandoned
;
238 OperationInProgress m_operationInProgress
;
239 BlockAllocator m_blockAllocator
;
240 MarkedSpace m_objectSpace
;
241 CopiedSpace m_storageSpace
;
243 #if ENABLE(SIMPLE_HEAP_PROFILING)
244 VTableSpectrum m_destroyedTypeCounts
;
247 ProtectCountSet m_protectedValues
;
248 Vector
<Vector
<ValueStringPair
, 0, UnsafeVectorOverflow
>* > m_tempSortingVectors
;
249 OwnPtr
<HashSet
<MarkedArgumentBuffer
*> > m_markListSet
;
251 MachineThreads m_machineThreads
;
253 GCThreadSharedData m_sharedData
;
254 SlotVisitor m_slotVisitor
;
255 CopyVisitor m_copyVisitor
;
257 HandleSet m_handleSet
;
258 HandleStack m_handleStack
;
259 DFGCodeBlocks m_dfgCodeBlocks
;
260 JITStubRoutineSet m_jitStubRoutines
;
261 FinalizerOwner m_finalizerOwner
;
263 bool m_isSafeToCollect
;
266 double m_lastGCLength
;
267 double m_lastCodeDiscardTime
;
269 DoublyLinkedList
<ExecutableBase
> m_compiledCode
;
271 OwnPtr
<GCActivityCallback
> m_activityCallback
;
272 OwnPtr
<IncrementalSweeper
> m_sweeper
;
273 Vector
<MarkedBlock
*> m_blockSnapshot
;
276 struct MarkedBlockSnapshotFunctor
: public MarkedBlock::VoidFunctor
{
277 MarkedBlockSnapshotFunctor(Vector
<MarkedBlock
*>& blocks
)
283 void operator()(MarkedBlock
* block
) { m_blocks
[m_index
++] = block
; }
286 Vector
<MarkedBlock
*>& m_blocks
;
289 inline bool Heap::shouldCollect()
291 if (Options::gcMaxHeapSize())
292 return m_bytesAllocated
> Options::gcMaxHeapSize() && m_isSafeToCollect
&& m_operationInProgress
== NoOperation
;
293 return m_bytesAllocated
> m_bytesAllocatedLimit
&& m_isSafeToCollect
&& m_operationInProgress
== NoOperation
;
298 return m_operationInProgress
!= NoOperation
;
301 inline Heap
* Heap::heap(const JSCell
* cell
)
303 return MarkedBlock::blockFor(cell
)->heap();
306 inline Heap
* Heap::heap(const JSValue v
)
310 return heap(v
.asCell());
313 inline bool Heap::isLive(const void* cell
)
315 return MarkedBlock::blockFor(cell
)->isLiveCell(cell
);
318 inline bool Heap::isMarked(const void* cell
)
320 return MarkedBlock::blockFor(cell
)->isMarked(cell
);
323 inline bool Heap::testAndSetMarked(const void* cell
)
325 return MarkedBlock::blockFor(cell
)->testAndSetMarked(cell
);
328 inline void Heap::setMarked(const void* cell
)
330 MarkedBlock::blockFor(cell
)->setMarked(cell
);
333 inline bool Heap::isWriteBarrierEnabled()
335 #if ENABLE(WRITE_BARRIER_PROFILING)
342 inline void Heap::writeBarrier(const JSCell
*, JSCell
*)
344 WriteBarrierCounters::countWriteBarrier();
347 inline void Heap::writeBarrier(const JSCell
*, JSValue
)
349 WriteBarrierCounters::countWriteBarrier();
352 inline void Heap::reportExtraMemoryCost(size_t cost
)
354 if (cost
> minExtraCost
)
355 reportExtraMemoryCostSlowCase(cost
);
358 template<typename Functor
> inline typename
Functor::ReturnType
Heap::forEachProtectedCell(Functor
& functor
)
360 ProtectCountSet::iterator end
= m_protectedValues
.end();
361 for (ProtectCountSet::iterator it
= m_protectedValues
.begin(); it
!= end
; ++it
)
363 m_handleSet
.forEachStrongHandle(functor
, m_protectedValues
);
365 return functor
.returnValue();
368 template<typename Functor
> inline typename
Functor::ReturnType
Heap::forEachProtectedCell()
371 return forEachProtectedCell(functor
);
374 inline void* Heap::allocateWithNormalDestructor(size_t bytes
)
376 ASSERT(isValidAllocation(bytes
));
377 return m_objectSpace
.allocateWithNormalDestructor(bytes
);
380 inline void* Heap::allocateWithImmortalStructureDestructor(size_t bytes
)
382 ASSERT(isValidAllocation(bytes
));
383 return m_objectSpace
.allocateWithImmortalStructureDestructor(bytes
);
386 inline void* Heap::allocateWithoutDestructor(size_t bytes
)
388 ASSERT(isValidAllocation(bytes
));
389 return m_objectSpace
.allocateWithoutDestructor(bytes
);
392 inline CheckedBoolean
Heap::tryAllocateStorage(size_t bytes
, void** outPtr
)
394 return m_storageSpace
.tryAllocate(bytes
, outPtr
);
397 inline CheckedBoolean
Heap::tryReallocateStorage(void** ptr
, size_t oldSize
, size_t newSize
)
399 return m_storageSpace
.tryReallocate(ptr
, oldSize
, newSize
);
402 inline BlockAllocator
& Heap::blockAllocator()
404 return m_blockAllocator
;