/*
 * Copyright (C) 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef BlockAllocator_h
#define BlockAllocator_h

#include "HeapBlock.h"
#include "Region.h"
#include <wtf/DoublyLinkedList.h>
#include <wtf/Forward.h>
#include <wtf/PageAllocationAligned.h>
#include <wtf/TCSpinLock.h>
#include <wtf/Threading.h>

#if PLATFORM(IOS)
#include "GCActivityCallback.h"
#endif

namespace JSC {

class BlockAllocator;
class CopiedBlock;
class CopyWorkListSegment;
class HandleBlock;
class VM;
class MarkStackSegment;
class MarkedBlock;
class WeakBlock;

// Simple allocator that reduces the cost of virtual memory operations by holding
// onto blocks of memory for short periods of time and then freeing them on a
// secondary thread.
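//
// An illustrative call sequence (hypothetical caller; the mapping from block
// types to region sets is given by the regionSetFor<> specializations below):
//
//     BlockAllocator allocator;
//     // allocate<T>() hands back raw block memory of T::blockSize bytes; callers
//     // typically construct the concrete block type in place on top of the DeadBlock.
//     DeadBlock* memory = allocator.allocate<MarkedBlock>();
//     ...
//     // deallocate<T>() returns a block's memory to its region; regions that
//     // become completely empty are later released on the block freeing thread.
//     allocator.deallocate(markedBlock);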

class BlockAllocator {
public:
    BlockAllocator();
    ~BlockAllocator();

    template <typename T> DeadBlock* allocate();
    DeadBlock* allocateCustomSize(size_t blockSize, size_t blockAlignment);
    template <typename T> void deallocate(T*);
    template <typename T> void deallocateCustomSize(T*);

private:
    void waitForRelativeTimeWhileHoldingLock(double relative);
    void waitForRelativeTime(double relative);

    void blockFreeingThreadMain();
    static void blockFreeingThreadStartFunc(void* heap);

    struct RegionSet {
        RegionSet(size_t blockSize)
            : m_numberOfPartialRegions(0)
            , m_blockSize(blockSize)
        {
        }

        bool isEmpty() const
        {
            return m_fullRegions.isEmpty() && m_partialRegions.isEmpty();
        }

        DoublyLinkedList<Region> m_fullRegions;
        DoublyLinkedList<Region> m_partialRegions;
        size_t m_numberOfPartialRegions;
        size_t m_blockSize;
    };

    DeadBlock* tryAllocateFromRegion(RegionSet&, DoublyLinkedList<Region>&, size_t&);

    bool allRegionSetsAreEmpty() const;
    void releaseFreeRegions();

    template <typename T> RegionSet& regionSetFor();

    SuperRegion m_superRegion;
    RegionSet m_copiedRegionSet;
    RegionSet m_markedRegionSet;
    // WeakBlocks, MarkStackSegments, and HandleBlocks use the same RegionSet since they're all the same (4KB) size.
    RegionSet m_fourKBBlockRegionSet;
    RegionSet m_workListRegionSet;

    DoublyLinkedList<Region> m_emptyRegions;
    size_t m_numberOfEmptyRegions;

    bool m_isCurrentlyAllocating;
    bool m_blockFreeingThreadShouldQuit;
    SpinLock m_regionLock;
    Mutex m_emptyRegionConditionLock;
    ThreadCondition m_emptyRegionCondition;
    ThreadIdentifier m_blockFreeingThread;
};

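// Fast-path helper for allocate<T>(): pops a block off the head region of the
// given list, with m_regionLock already held by the caller. An empty head region
// is first moved from m_emptyRegions onto the set's partial list and reset to the
// set's block size; a region that becomes full is moved onto the full list.
// Returns 0 when the list has no regions to allocate from.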
inline DeadBlock* BlockAllocator::tryAllocateFromRegion(RegionSet& set, DoublyLinkedList<Region>& regions, size_t& numberOfRegions)
{
    if (numberOfRegions) {
        ASSERT(!regions.isEmpty());
        Region* region = regions.head();
        ASSERT(!region->isFull());

        if (region->isEmpty()) {
            ASSERT(region == m_emptyRegions.head());
            m_numberOfEmptyRegions--;
            set.m_numberOfPartialRegions++;
            region = m_emptyRegions.removeHead()->reset(set.m_blockSize);
            set.m_partialRegions.push(region);
        }

        DeadBlock* block = region->allocate();

        if (region->isFull()) {
            set.m_numberOfPartialRegions--;
            set.m_fullRegions.push(set.m_partialRegions.removeHead());
        }

        return block;
    }
    return 0;
}

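// Allocates a block for type T: first tries the set's partially used regions,
// then the shared cache of empty regions, both under m_regionLock. Only if both
// fail is a new region carved out of the SuperRegion (outside the lock) and then
// pushed onto the empty list and allocated from.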
template<typename T>
inline DeadBlock* BlockAllocator::allocate()
{
    RegionSet& set = regionSetFor<T>();
    DeadBlock* block;
    m_isCurrentlyAllocating = true;
    {
        SpinLockHolder locker(&m_regionLock);
        if ((block = tryAllocateFromRegion(set, set.m_partialRegions, set.m_numberOfPartialRegions)))
            return block;
        if ((block = tryAllocateFromRegion(set, m_emptyRegions, m_numberOfEmptyRegions)))
            return block;
    }

    Region* newRegion = Region::create(&m_superRegion, T::blockSize);

    SpinLockHolder locker(&m_regionLock);
    m_emptyRegions.push(newRegion);
    m_numberOfEmptyRegions++;
    block = tryAllocateFromRegion(set, m_emptyRegions, m_numberOfEmptyRegions);
    ASSERT(block);
    return block;
}

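// Oversized blocks bypass the region caches: each gets its own custom-size
// region, with the size rounded up to the requested alignment. The region is
// destroyed again in deallocateCustomSize().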
inline DeadBlock* BlockAllocator::allocateCustomSize(size_t blockSize, size_t blockAlignment)
{
    size_t realSize = WTF::roundUpToMultipleOf(blockAlignment, blockSize);
    Region* newRegion = Region::createCustomSize(&m_superRegion, realSize, blockAlignment);
    DeadBlock* block = newRegion->allocate();
    ASSERT(block);
    return block;
}

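// Returns a block to its region and refiles the region on the appropriate list
// (full -> partial, or partial -> empty). When this produces the first cached
// empty region, the block freeing thread is signalled so it can release memory.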
template<typename T>
inline void BlockAllocator::deallocate(T* block)
{
    RegionSet& set = regionSetFor<T>();
    bool shouldWakeBlockFreeingThread = false;
    {
        SpinLockHolder locker(&m_regionLock);
        Region* region = block->region();
        ASSERT(!region->isEmpty());
        if (region->isFull())
            set.m_fullRegions.remove(region);
        else {
            set.m_partialRegions.remove(region);
            set.m_numberOfPartialRegions--;
        }

        region->deallocate(block);

        if (region->isEmpty()) {
            m_emptyRegions.push(region);
            shouldWakeBlockFreeingThread = !m_numberOfEmptyRegions;
            m_numberOfEmptyRegions++;
        } else {
            set.m_partialRegions.push(region);
            set.m_numberOfPartialRegions++;
        }
    }

    if (shouldWakeBlockFreeingThread) {
        MutexLocker mutexLocker(m_emptyRegionConditionLock);
        m_emptyRegionCondition.signal();
    }

#if PLATFORM(IOS)
    if (!GCActivityCallback::s_shouldCreateGCTimer)
        releaseFreeRegions();
#endif
}

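// Custom-size blocks are never cached: deallocating one tears down its dedicated
// region immediately.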
template<typename T>
inline void BlockAllocator::deallocateCustomSize(T* block)
{
    Region* region = block->region();
    ASSERT(region->isCustomSize());
    region->deallocate(block);
    region->destroy();
}

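// Maps each block type (and its HeapBlock<> base) to the RegionSet that serves
// blocks of its size. Any type without a specialization below falls through to
// the generic template, which asserts and is never meant to be reached.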
template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<CopiedBlock>()
{
    return m_copiedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<MarkedBlock>()
{
    return m_markedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<WeakBlock>()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<MarkStackSegment>()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<CopyWorkListSegment>()
{
    return m_workListRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HandleBlock>()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<CopiedBlock> >()
{
    return m_copiedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<MarkedBlock> >()
{
    return m_markedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<WeakBlock> >()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<MarkStackSegment> >()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<CopyWorkListSegment> >()
{
    return m_workListRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<HandleBlock> >()
{
    return m_fourKBBlockRegionSet;
}

template <typename T>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor()
{
    RELEASE_ASSERT_NOT_REACHED();
    return *(RegionSet*)0;
}

} // namespace JSC

#endif // BlockAllocator_h