/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CopiedSpaceInlines_h
#define CopiedSpaceInlines_h

#include "CopiedBlock.h"
#include "CopiedSpace.h"
#include "Heap.h"
#include "VM.h"
#include <wtf/CheckedBoolean.h>

namespace JSC {

inline bool CopiedSpace::contains(CopiedBlock* block)
{
    return (!m_newGen.blockFilter.ruleOut(reinterpret_cast<Bits>(block)) || !m_oldGen.blockFilter.ruleOut(reinterpret_cast<Bits>(block)))
        && m_blockSet.contains(block);
}

inline bool CopiedSpace::contains(void* ptr, CopiedBlock*& result)
{
    CopiedBlock* block = blockFor(ptr);
    if (contains(block)) {
        result = block;
        return true;
    }
    result = 0;
    return false;
}
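
// A minimal usage sketch (the caller and pointer names are hypothetical):
// resolving a possibly-interior pointer to its block before pinning it.
//
//     CopiedBlock* block;
//     if (copiedSpace.contains(candidatePointer, block))
//         copiedSpace.pin(block);
//
// The TinyBloomFilter checks in contains(CopiedBlock*) are a cheap,
// conservative pre-filter: a filter can only definitively rule a block
// out, so just the candidates that pass one of the two generation filters
// pay for the m_blockSet hash lookup.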

inline void CopiedSpace::pin(CopiedBlock* block)
{
    block->pin();
}

inline void CopiedSpace::pinIfNecessary(void* opaquePointer)
{
    // Pointers into the copied space come in the following varieties:
    // 1) Pointers to the start of a span of memory. This is the most
    //    natural though not necessarily the most common.
    // 2) Pointers to one value-sized (8 byte) word past the end of
    //    a span of memory. This currently occurs with semi-butterflies
    //    and should be fixed soon, once the other half of the
    //    butterfly lands.
    // 3) Pointers to the innards arising from loop induction variable
    //    optimizations (either manual ones or automatic, by the
    //    compiler).
    // 4) Pointers to the end of a span of memory, arising from
    //    induction variable optimizations combined with the
    //    GC-to-compiler contract laid out in the C spec: a pointer to
    //    the end of a span of memory must be considered to be a
    //    pointer to that memory.

    EncodedJSValue* pointer = reinterpret_cast<EncodedJSValue*>(opaquePointer);
    CopiedBlock* block;

    // Handle (1) and (3).
    if (contains(pointer, block))
        pin(block);

    // Handle (4). We don't have to explicitly check and pin the block under
    // this pointer because it cannot point to anything that the checks for
    // cases (1) and (3) above or case (2) below wouldn't already catch.
    pointer--;

    // Handle (2).
    pointer--;
    if (contains(pointer, block))
        pin(block);
}
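
// A note on the two decrements above: with 8-byte EncodedJSValue words, a
// case-(2) pointer is two words past the last word of a span and a
// case-(4) pointer is one word past it. Since contains() resolves pointers
// at block granularity via blockFor(), the single check after stepping
// back two words lands a case-(2) pointer on the span's last word and a
// case-(4) pointer inside the span's block, even when the span ends
// exactly at a block boundary.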

inline void CopiedSpace::recycleEvacuatedBlock(CopiedBlock* block, HeapOperation collectionType)
{
    ASSERT(block);
    ASSERT(block->canBeRecycled());
    ASSERT(!block->m_isPinned);
    {
        SpinLockHolder locker(&m_toSpaceLock);
        m_blockSet.remove(block);
        if (collectionType == EdenCollection)
            m_newGen.fromSpace->remove(block);
        else
            m_oldGen.fromSpace->remove(block);
    }
    CopiedBlock::destroy(block);
}

inline void CopiedSpace::recycleBorrowedBlock(CopiedBlock* block)
{
    CopiedBlock::destroy(block);

    {
        MutexLocker locker(m_loanedBlocksLock);
        ASSERT(m_numberOfLoanedBlocks > 0);
        ASSERT(m_inCopyingPhase);
        m_numberOfLoanedBlocks--;
        if (!m_numberOfLoanedBlocks)
            m_loanedBlocksCondition.signal();
    }
}

inline CopiedBlock* CopiedSpace::allocateBlockForCopyingPhase()
{
    ASSERT(m_inCopyingPhase);
    CopiedBlock* block = CopiedBlock::createNoZeroFill();

    {
        MutexLocker locker(m_loanedBlocksLock);
        m_numberOfLoanedBlocks++;
    }

    ASSERT(!block->dataSize());
    return block;
}
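
// How the loan accounting in the two functions above pairs up: every block
// handed out by allocateBlockForCopyingPhase() bumps
// m_numberOfLoanedBlocks, and each recycleBorrowedBlock() both destroys
// the returned block and decrements the count. When the count hits zero,
// m_loanedBlocksCondition is signaled, so a thread blocked on that
// condition can conclude that no copying-phase scratch blocks remain
// outstanding. A minimal borrower sketch (the surrounding copy loop is
// assumed, not shown in this file):
//
//     CopiedBlock* scratch = copiedSpace.allocateBlockForCopyingPhase();
//     // ... use scratch during the copying phase ...
//     copiedSpace.recycleBorrowedBlock(scratch); // destroys scratch, returns the loan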

inline void CopiedSpace::allocateBlock()
{
    m_heap->collectIfNecessaryOrDefer();

    m_allocator.resetCurrentBlock();

    CopiedBlock* block = CopiedBlock::create();

    m_newGen.toSpace->push(block);
    m_newGen.blockFilter.add(reinterpret_cast<Bits>(block));
    m_blockSet.add(block);
    m_allocator.setCurrentBlock(block);
}

inline CheckedBoolean CopiedSpace::tryAllocate(size_t bytes, void** outPtr)
{
    ASSERT(!m_heap->vm()->isInitializingObject());
    ASSERT(bytes);

    if (!m_allocator.tryAllocate(bytes, outPtr))
        return tryAllocateSlowCase(bytes, outPtr);

    ASSERT(*outPtr);
    return true;
}
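
// A minimal caller sketch for tryAllocate() (names are hypothetical).
// Returning CheckedBoolean makes silently ignoring the result an error in
// debug builds, so the failure path has to be written out:
//
//     void* storage;
//     if (!copiedSpace.tryAllocate(sizeInBytes, &storage))
//         return nullptr; // out of copied-space memory; caller recovers
//     // storage now points at sizeInBytes bytes in the current block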

inline bool CopiedSpace::isOversize(size_t bytes)
{
    return bytes > s_maxAllocationSize;
}

inline bool CopiedSpace::isPinned(void* ptr)
{
    return blockFor(ptr)->m_isPinned;
}

inline CopiedBlock* CopiedSpace::blockFor(void* ptr)
{
    return reinterpret_cast<CopiedBlock*>(reinterpret_cast<size_t>(ptr) & s_blockMask);
}
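
// blockFor() depends on every CopiedBlock living at a block-size-aligned
// address, so masking off the low bits of any interior pointer recovers
// the address of the block header. For example, with a hypothetical 32 KB
// block size, s_blockMask would be ~static_cast<size_t>(32 * 1024 - 1),
// and any pointer into a block's payload maps back to the CopiedBlock*
// at the start of that 32 KB region.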

template <HeapOperation collectionType>
inline void CopiedSpace::startedCopying()
{
    DoublyLinkedList<CopiedBlock>* fromSpace;
    DoublyLinkedList<CopiedBlock>* oversizeBlocks;
    TinyBloomFilter* blockFilter;
    if (collectionType == FullCollection) {
        ASSERT(m_oldGen.fromSpace->isEmpty());
        ASSERT(m_newGen.fromSpace->isEmpty());

        m_oldGen.toSpace->append(*m_newGen.toSpace);
        m_oldGen.oversizeBlocks.append(m_newGen.oversizeBlocks);

        ASSERT(m_newGen.toSpace->isEmpty());
        ASSERT(m_newGen.fromSpace->isEmpty());
        ASSERT(m_newGen.oversizeBlocks.isEmpty());

        std::swap(m_oldGen.fromSpace, m_oldGen.toSpace);
        fromSpace = m_oldGen.fromSpace;
        oversizeBlocks = &m_oldGen.oversizeBlocks;
        blockFilter = &m_oldGen.blockFilter;
    } else {
        std::swap(m_newGen.fromSpace, m_newGen.toSpace);
        fromSpace = m_newGen.fromSpace;
        oversizeBlocks = &m_newGen.oversizeBlocks;
        blockFilter = &m_newGen.blockFilter;
    }

    blockFilter->reset();
    m_allocator.resetCurrentBlock();

    CopiedBlock* next = 0;
    size_t totalLiveBytes = 0;
    size_t totalUsableBytes = 0;
    for (CopiedBlock* block = fromSpace->head(); block; block = next) {
        next = block->next();
        if (!block->isPinned() && block->canBeRecycled()) {
            recycleEvacuatedBlock(block, collectionType);
            continue;
        }
        ASSERT(block->liveBytes() <= CopiedBlock::blockSize);
        totalLiveBytes += block->liveBytes();
        totalUsableBytes += block->payloadCapacity();
        block->didPromote();
    }

    CopiedBlock* block = oversizeBlocks->head();
    while (block) {
        CopiedBlock* next = block->next();
        if (block->isPinned()) {
            blockFilter->add(reinterpret_cast<Bits>(block));
            totalLiveBytes += block->payloadCapacity();
            totalUsableBytes += block->payloadCapacity();
            block->didPromote();
        } else {
            oversizeBlocks->remove(block);
            m_blockSet.remove(block);
            CopiedBlock::destroy(block);
        }
        block = next;
    }

    double markedSpaceBytes = m_heap->objectSpace().capacity();
    double totalFragmentation = static_cast<double>(totalLiveBytes + markedSpaceBytes) / static_cast<double>(totalUsableBytes + markedSpaceBytes);
    m_shouldDoCopyPhase = m_heap->operationInProgress() == EdenCollection || totalFragmentation <= Options::minHeapUtilization();
    if (!m_shouldDoCopyPhase) {
        if (Options::logGC())
            dataLog("Skipped copying, ");
        return;
    }

    if (Options::logGC())
        dataLogF("Did copy, ");
    ASSERT(m_shouldDoCopyPhase);
    ASSERT(!m_numberOfLoanedBlocks);
    ASSERT(!m_inCopyingPhase);
    m_inCopyingPhase = true;
}
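
// A worked example of the heuristic above (the numbers are made up): with
// markedSpaceBytes = 64 MB, totalLiveBytes = 8 MB, and totalUsableBytes =
// 32 MB, totalFragmentation = (8 + 64) / (32 + 64) = 0.75, so a
// FullCollection runs the copy phase only when Options::minHeapUtilization()
// is at least 0.75; an EdenCollection always runs it.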

} // namespace JSC

#endif // CopiedSpaceInlines_h