/*
 *  Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *  Copyright (C) 2007 Eric Seidel <eric@webkit.org>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#include "config.h"
#include "MarkedSpace.h"

#include "IncrementalSweeper.h"
#include "JSGlobalObject.h"
#include "JSLock.h"
#include "JSObject.h"

namespace JSC {

class Structure;

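// forEachBlock functor that frees the blocks it visits: unconditionally in
// FreeAll mode, or only when a block is empty in FreeOrShrink mode (non-empty
// blocks are shrunk instead). Note that m_blocks is never populated in this
// file, so returnValue() always yields null.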
class Free {
public:
    typedef MarkedBlock* ReturnType;

    enum FreeMode { FreeOrShrink, FreeAll };

    Free(FreeMode, MarkedSpace*);
    void operator()(MarkedBlock*);
    ReturnType returnValue();

private:
    FreeMode m_freeMode;
    MarkedSpace* m_markedSpace;
    DoublyLinkedList<MarkedBlock> m_blocks;
};

inline Free::Free(FreeMode freeMode, MarkedSpace* newSpace)
    : m_freeMode(freeMode)
    , m_markedSpace(newSpace)
{
}

inline void Free::operator()(MarkedBlock* block)
{
    if (m_freeMode == FreeOrShrink)
        m_markedSpace->freeOrShrinkBlock(block);
    else
        m_markedSpace->freeBlock(block);
}

inline Free::ReturnType Free::returnValue()
{
    return m_blocks.head();
}

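// forEachBlock functors that forward to each block's weak set. Visiting runs
// during marking; reaping runs afterwards to clear weak handles whose
// referents did not survive the collection.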
struct VisitWeakSet : MarkedBlock::VoidFunctor {
    VisitWeakSet(HeapRootVisitor& heapRootVisitor) : m_heapRootVisitor(heapRootVisitor) { }
    void operator()(MarkedBlock* block) { block->visitWeakSet(m_heapRootVisitor); }
private:
    HeapRootVisitor& m_heapRootVisitor;
};

struct ReapWeakSet : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->reapWeakSet(); }
};

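// Each subspace (no destructor, normal destructor, immortal structure) gets
// one allocator per size class: fine-grained "precise" classes up to
// preciseCutoff, coarser "imprecise" classes up to impreciseCutoff, and a
// single large allocator (cell size 0) for anything bigger.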
MarkedSpace::MarkedSpace(Heap* heap)
    : m_heap(heap)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::None);
        normalDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::Normal);
        immortalStructureDestructorAllocatorFor(cellSize).init(heap, this, cellSize, MarkedBlock::ImmortalStructure);
    }

    m_normalSpace.largeAllocator.init(heap, this, 0, MarkedBlock::None);
    m_normalDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::Normal);
    m_immortalStructureDestructorSpace.largeAllocator.init(heap, this, 0, MarkedBlock::ImmortalStructure);
}

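// The space is going away, so free every block unconditionally.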
MarkedSpace::~MarkedSpace()
{
    Free free(Free::FreeAll, this);
    forEachBlock(free);
}

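// Run finalizers for all remaining cells, typically at VM teardown. Liveness
// data must be canonicalized first so each block sees a consistent view of
// which cells are still in use.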
struct LastChanceToFinalize : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->lastChanceToFinalize(); }
};

void MarkedSpace::lastChanceToFinalize()
{
    canonicalizeCellLivenessData();
    forEachBlock<LastChanceToFinalize>();
}

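// Sweep every block synchronously, after telling the incremental sweeper,
// whose remaining work this full sweep supersedes.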
void MarkedSpace::sweep()
{
    m_heap->sweeper()->willFinishSweeping();
    forEachBlock<Sweep>();
}

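// Return every allocator, including the large allocators, to its initial
// state, so subsequent allocation requests search for free cells afresh.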
void MarkedSpace::resetAllocators()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).reset();
        normalDestructorAllocatorFor(cellSize).reset();
        immortalStructureDestructorAllocatorFor(cellSize).reset();
    }

    m_normalSpace.largeAllocator.reset();
    m_normalDestructorSpace.largeAllocator.reset();
    m_immortalStructureDestructorSpace.largeAllocator.reset();
}

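// Thin wrappers that run the weak set functors above over every block.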
void MarkedSpace::visitWeakSets(HeapRootVisitor& heapRootVisitor)
{
    VisitWeakSet visitWeakSet(heapRootVisitor);
    forEachBlock(visitWeakSet);
}

void MarkedSpace::reapWeakSets()
{
    forEachBlock<ReapWeakSet>();
}

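// Flush each allocator's in-progress allocation state back into its current
// block so that per-cell liveness information is accurate for iteration.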
void MarkedSpace::canonicalizeCellLivenessData()
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        allocatorFor(cellSize).canonicalizeCellLivenessData();
        normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
        immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        allocatorFor(cellSize).canonicalizeCellLivenessData();
        normalDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
        immortalStructureDestructorAllocatorFor(cellSize).canonicalizeCellLivenessData();
    }

    m_normalSpace.largeAllocator.canonicalizeCellLivenessData();
    m_normalDestructorSpace.largeAllocator.canonicalizeCellLivenessData();
    m_immortalStructureDestructorSpace.largeAllocator.canonicalizeCellLivenessData();
}

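// Heuristic used before a collection: probe each allocator's blocks until
// the deadline passes. If the heap appears to be largely swapped out,
// walking it would thrash, so the caller can defer the collection.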
bool MarkedSpace::isPagedOut(double deadline)
{
    for (size_t cellSize = preciseStep; cellSize <= preciseCutoff; cellSize += preciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    for (size_t cellSize = impreciseStep; cellSize <= impreciseCutoff; cellSize += impreciseStep) {
        if (allocatorFor(cellSize).isPagedOut(deadline)
            || normalDestructorAllocatorFor(cellSize).isPagedOut(deadline)
            || immortalStructureDestructorAllocatorFor(cellSize).isPagedOut(deadline))
            return true;
    }

    if (m_normalSpace.largeAllocator.isPagedOut(deadline)
        || m_normalDestructorSpace.largeAllocator.isPagedOut(deadline)
        || m_immortalStructureDestructorSpace.largeAllocator.isPagedOut(deadline))
        return true;

    return false;
}

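// Unlink the block from its allocator and from m_blocks, then hand its
// memory back to the heap's block allocator. Blocks of the standard size are
// deallocated directly; oversize blocks take the custom-size path.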
void MarkedSpace::freeBlock(MarkedBlock* block)
{
    block->allocator()->removeBlock(block);
    m_blocks.remove(block);
    if (block->capacity() == MarkedBlock::blockSize) {
        m_heap->blockAllocator().deallocate(MarkedBlock::destroy(block));
        return;
    }
    m_heap->blockAllocator().deallocateCustomSize(MarkedBlock::destroy(block));
}

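// Empty blocks are freed outright; blocks that still hold live cells are
// shrunk instead, releasing what memory they can.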
void MarkedSpace::freeOrShrinkBlock(MarkedBlock* block)
{
    if (!block->isEmpty()) {
        block->shrink();
        return;
    }

    freeBlock(block);
}

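// Give back as much memory as possible without disturbing live cells: free
// the empty blocks and shrink the rest. (The Shrink functor is unused in
// this file; shrink() goes through Free in FreeOrShrink mode.)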
struct Shrink : MarkedBlock::VoidFunctor {
    void operator()(MarkedBlock* block) { block->shrink(); }
};

void MarkedSpace::shrink()
{
    Free freeOrShrink(Free::FreeOrShrink, this);
    forEachBlock(freeOrShrink);
}

} // namespace JSC