/*
 * Copyright (C) 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef BlockAllocator_h
#define BlockAllocator_h

#include "HeapBlock.h"
#include "Region.h"
#include "SuperRegion.h"

#include <wtf/DoublyLinkedList.h>
#include <wtf/Forward.h>
#include <wtf/PageAllocationAligned.h>
#include <wtf/StdLibExtras.h>
#include <wtf/TCSpinLock.h>
#include <wtf/Threading.h>

#include "GCActivityCallback.h"

namespace JSC {

class CopiedBlock;
class CopyWorkListSegment;
class HandleBlock;
class MarkStackSegment;
class MarkedBlock;
class WeakBlock;

// Simple allocator to reduce VM cost by holding onto blocks of memory for
// short periods of time and then freeing them on a secondary thread.
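//
// Illustrative use only (a sketch, not code from this header; in practice the
// Heap owns the BlockAllocator and the block classes above are its clients):
//
//     BlockAllocator allocator;
//     DeadBlock* chunk = allocator.allocate<CopiedBlock>(); // region-backed memory sized for a CopiedBlock
//     // ... construct the real block type in place and use it ...
//     allocator.deallocate(copiedBlock); // 'copiedBlock' stands for the block built in 'chunk'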
class BlockAllocator {
public:
    BlockAllocator();
    ~BlockAllocator();

    template <typename T> DeadBlock* allocate();
    DeadBlock* allocateCustomSize(size_t blockSize, size_t blockAlignment);
    template <typename T> void deallocate(T*);
    template <typename T> void deallocateCustomSize(T*);

private:
    void waitForRelativeTimeWhileHoldingLock(double relative);
    void waitForRelativeTime(double relative);

    void blockFreeingThreadMain();
    static void blockFreeingThreadStartFunc(void* heap);

    struct RegionSet {
        RegionSet(size_t blockSize)
            : m_numberOfPartialRegions(0)
            , m_blockSize(blockSize)
        {
        }

        bool isEmpty() const
        {
            return m_fullRegions.isEmpty() && m_partialRegions.isEmpty();
        }

        DoublyLinkedList<Region> m_fullRegions;
        DoublyLinkedList<Region> m_partialRegions;
        size_t m_numberOfPartialRegions;
        size_t m_blockSize;
    };

    DeadBlock* tryAllocateFromRegion(RegionSet&, DoublyLinkedList<Region>&, size_t&);

    bool allRegionSetsAreEmpty() const;
    void releaseFreeRegions();

    template <typename T> RegionSet& regionSetFor();

    SuperRegion m_superRegion;
    RegionSet m_copiedRegionSet;
    RegionSet m_markedRegionSet;
    // WeakBlocks and MarkStackSegments use the same RegionSet since they're the same size.
    RegionSet m_fourKBBlockRegionSet;
    RegionSet m_workListRegionSet;

    DoublyLinkedList<Region> m_emptyRegions;
    size_t m_numberOfEmptyRegions;

    bool m_isCurrentlyAllocating;
    bool m_blockFreeingThreadShouldQuit;
    SpinLock m_regionLock;
    Mutex m_emptyRegionConditionLock;
    ThreadCondition m_emptyRegionCondition;
    ThreadIdentifier m_blockFreeingThread;
};
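
// A Region starts out on the shared m_emptyRegions list, moves to a set's
// m_partialRegions list once it hands out its first block, and on to
// m_fullRegions when every block in it is in use; deallocation walks the same
// path in reverse. tryAllocateFromRegion() below performs the forward
// promotion and expects its callers to hold m_regionLock.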
inline DeadBlock* BlockAllocator::tryAllocateFromRegion(RegionSet& set, DoublyLinkedList<Region>& regions, size_t& numberOfRegions)
{
    if (numberOfRegions) {
        ASSERT(!regions.isEmpty());
        Region* region = regions.head();
        ASSERT(!region->isFull());

        if (region->isEmpty()) {
            ASSERT(region == m_emptyRegions.head());
            m_numberOfEmptyRegions--;
            set.m_numberOfPartialRegions++;
            region = m_emptyRegions.removeHead()->reset(set.m_blockSize);
            set.m_partialRegions.push(region);
        }

        DeadBlock* block = region->allocate();

        if (region->isFull()) {
            set.m_numberOfPartialRegions--;
            set.m_fullRegions.push(set.m_partialRegions.removeHead());
        }

        return block;
    }
    return 0;
}

template <typename T>
inline DeadBlock* BlockAllocator::allocate()
{
    RegionSet& set = regionSetFor<T>();
    DeadBlock* block;
    m_isCurrentlyAllocating = true;
    {
        SpinLockHolder locker(&m_regionLock);
        if ((block = tryAllocateFromRegion(set, set.m_partialRegions, set.m_numberOfPartialRegions)))
            return block;
        if ((block = tryAllocateFromRegion(set, m_emptyRegions, m_numberOfEmptyRegions)))
            return block;
    }

    Region* newRegion = Region::create(&m_superRegion, T::blockSize);

    SpinLockHolder locker(&m_regionLock);
    m_emptyRegions.push(newRegion);
    m_numberOfEmptyRegions++;
    block = tryAllocateFromRegion(set, m_emptyRegions, m_numberOfEmptyRegions);
    ASSERT(block);
    return block;
}

inline DeadBlock* BlockAllocator::allocateCustomSize(size_t blockSize, size_t blockAlignment)
{
    size_t realSize = WTF::roundUpToMultipleOf(blockAlignment, blockSize);
    Region* newRegion = Region::createCustomSize(&m_superRegion, realSize, blockAlignment);
    DeadBlock* block = newRegion->allocate();
    ASSERT(block);
    return block;
}

template <typename T>
inline void BlockAllocator::deallocate(T* block)
{
    RegionSet& set = regionSetFor<T>();
    bool shouldWakeBlockFreeingThread = false;
    {
        SpinLockHolder locker(&m_regionLock);
        Region* region = block->region();
        ASSERT(!region->isEmpty());
        if (region->isFull())
            set.m_fullRegions.remove(region);
        else {
            set.m_partialRegions.remove(region);
            set.m_numberOfPartialRegions--;
        }

        region->deallocate(block);

        if (region->isEmpty()) {
            m_emptyRegions.push(region);
            shouldWakeBlockFreeingThread = !m_numberOfEmptyRegions;
            m_numberOfEmptyRegions++;
        } else {
            set.m_partialRegions.push(region);
            set.m_numberOfPartialRegions++;
        }
    }

    if (shouldWakeBlockFreeingThread) {
        MutexLocker mutexLocker(m_emptyRegionConditionLock);
        m_emptyRegionCondition.signal();
    }

    if (!GCActivityCallback::s_shouldCreateGCTimer)
        releaseFreeRegions();
}

template <typename T>
inline void BlockAllocator::deallocateCustomSize(T* block)
{
    Region* region = block->region();
    ASSERT(region->isCustomSize());
    region->deallocate(block);
}
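
// Each block type maps to the RegionSet that manages regions of its size.
// HeapBlock<T> maps to the same set as T so that allocation and deallocation
// of a block agree on which set owns its Region.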
template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<CopiedBlock>()
{
    return m_copiedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<MarkedBlock>()
{
    return m_markedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<WeakBlock>()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<MarkStackSegment>()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<CopyWorkListSegment>()
{
    return m_workListRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HandleBlock>()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<CopiedBlock> >()
{
    return m_copiedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<MarkedBlock> >()
{
    return m_markedRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<WeakBlock> >()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<MarkStackSegment> >()
{
    return m_fourKBBlockRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<CopyWorkListSegment> >()
{
    return m_workListRegionSet;
}

template <>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor<HeapBlock<HandleBlock> >()
{
    return m_fourKBBlockRegionSet;
}

template <typename T>
inline BlockAllocator::RegionSet& BlockAllocator::regionSetFor()
{
    RELEASE_ASSERT_NOT_REACHED();
    return *(RegionSet*)0;
}

} // namespace JSC

#endif // BlockAllocator_h