]> git.saurik.com Git - apple/javascriptcore.git/blame_incremental - heap/CopiedSpaceInlines.h
JavaScriptCore-1218.34.tar.gz
[apple/javascriptcore.git] / heap / CopiedSpaceInlines.h
... / ...
CommitLineData
1/*
2 * Copyright (C) 2011 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#ifndef CopiedSpaceInlines_h
27#define CopiedSpaceInlines_h
28
29#include "CopiedBlock.h"
30#include "CopiedSpace.h"
31#include "Heap.h"
32#include "HeapBlock.h"
33#include "VM.h"
34#include <wtf/CheckedBoolean.h>
35
36namespace JSC {
37
38inline bool CopiedSpace::contains(CopiedBlock* block)
39{
40 return !m_blockFilter.ruleOut(reinterpret_cast<Bits>(block)) && m_blockSet.contains(block);
41}
42
43inline bool CopiedSpace::contains(void* ptr, CopiedBlock*& result)
44{
45 CopiedBlock* block = blockFor(ptr);
46 if (contains(block)) {
47 result = block;
48 return true;
49 }
50 result = 0;
51 return false;
52}
53
// Pins a block by delegating to CopiedBlock::pin(). The pinned state is
// observable through the block's m_isPinned flag (see isPinned() below);
// pinIfNecessary() uses this for blocks that conservative pointers
// appear to reference.
inline void CopiedSpace::pin(CopiedBlock* block)
{
    block->pin();
}
58
// Conservatively pins the block(s) that an opaque — possibly interior or
// one-past-the-end — pointer might reference. The exact pointer
// varieties handled are enumerated below; note that the two pointer
// decrements are cumulative, so the second containment check runs on a
// pointer two EncodedJSValue words before the original.
inline void CopiedSpace::pinIfNecessary(void* opaquePointer)
{
    // Pointers into the copied space come in the following varieties:
    // 1) Pointers to the start of a span of memory. This is the most
    //    natural though not necessarily the most common.
    // 2) Pointers to one value-sized (8 byte) word past the end of
    //    a span of memory. This currently occurs with semi-butterflies
    //    and should be fixed soon, once the other half of the
    //    butterfly lands.
    // 3) Pointers to the innards arising from loop induction variable
    //    optimizations (either manual ones or automatic, by the
    //    compiler).
    // 4) Pointers to the end of a span of memory in arising from
    //    induction variable optimizations combined with the
    //    GC-to-compiler contract laid out in the C spec: a pointer to
    //    the end of a span of memory must be considered to be a
    //    pointer to that memory.

    EncodedJSValue* pointer = reinterpret_cast<EncodedJSValue*>(opaquePointer);
    CopiedBlock* block;

    // Handle (1) and (3).
    if (contains(pointer, block))
        pin(block);

    // Handle (4). We don't have to explicitly check and pin the block under this
    // pointer because it cannot possibly point to something that cases (1) and
    // (3) above or case (2) below wouldn't already catch.
    pointer--;

    // Handle (2)
    pointer--;
    if (contains(pointer, block))
        pin(block);
}
94
// Removes a fully-evacuated block from from-space and from the exact
// block set, then returns its memory to the heap's block allocator.
// The block must be recyclable and must not be pinned.
inline void CopiedSpace::recycleEvacuatedBlock(CopiedBlock* block)
{
    ASSERT(block);
    ASSERT(block->canBeRecycled());
    ASSERT(!block->m_isPinned);
    {
        // Scope the spin lock to the two container updates only; the
        // deallocation below deliberately happens outside the lock.
        SpinLockHolder locker(&m_toSpaceLock);
        m_blockSet.remove(block);
        m_fromSpace->remove(block);
    }
    m_heap->blockAllocator().deallocate(CopiedBlock::destroy(block));
}
107
// Returns a block that was loaned out during the copying phase (see
// allocateBlockForCopyingPhase). Frees the block, then decrements the
// loaned-block count and wakes any thread waiting on
// m_loanedBlocksCondition once the last outstanding loan is repaid.
inline void CopiedSpace::recycleBorrowedBlock(CopiedBlock* block)
{
    m_heap->blockAllocator().deallocate(CopiedBlock::destroy(block));

    {
        MutexLocker locker(m_loanedBlocksLock);
        ASSERT(m_numberOfLoanedBlocks > 0);
        ASSERT(m_inCopyingPhase);
        m_numberOfLoanedBlocks--;
        // Only the transition to zero is interesting to waiters.
        if (!m_numberOfLoanedBlocks)
            m_loanedBlocksCondition.signal();
    }
}
121
// Allocates a fresh block for use during the copying phase, skipping the
// zero-fill (createNoZeroFill) since the copier will overwrite it, and
// records the block as loaned. recycleBorrowedBlock() repays the loan.
// May only be called while m_inCopyingPhase is set.
inline CopiedBlock* CopiedSpace::allocateBlockForCopyingPhase()
{
    ASSERT(m_inCopyingPhase);
    CopiedBlock* block = CopiedBlock::createNoZeroFill(m_heap->blockAllocator().allocate<CopiedBlock>());

    {
        MutexLocker locker(m_loanedBlocksLock);
        m_numberOfLoanedBlocks++;
    }

    // A freshly created block must start out empty.
    ASSERT(!block->dataSize());
    return block;
}
135
// Installs a fresh block as the current allocation target for to-space.
// First gives the heap a chance to collect if it says one is due, then
// retires the allocator's current block before publishing the new block
// to to-space, the Bloom filter, and the exact block set.
inline void CopiedSpace::allocateBlock()
{
    if (m_heap->shouldCollect())
        m_heap->collect(Heap::DoNotSweep);

    m_allocator.resetCurrentBlock();

    CopiedBlock* block = CopiedBlock::create(m_heap->blockAllocator().allocate<CopiedBlock>());

    m_toSpace->push(block);
    m_blockFilter.add(reinterpret_cast<Bits>(block));
    m_blockSet.add(block);
    m_allocator.setCurrentBlock(block);
}
150
151inline CheckedBoolean CopiedSpace::tryAllocate(size_t bytes, void** outPtr)
152{
153 ASSERT(!m_heap->vm()->isInitializingObject());
154
155 if (!m_allocator.tryAllocate(bytes, outPtr))
156 return tryAllocateSlowCase(bytes, outPtr);
157
158 ASSERT(*outPtr);
159 return true;
160}
161
// Requests larger than s_maxAllocationSize cannot be satisfied by a
// normal copied block and must take the oversize allocation path.
inline bool CopiedSpace::isOversize(size_t bytes)
{
    return bytes > s_maxAllocationSize;
}
166
// Returns whether the block containing ptr is pinned. Assumes ptr
// actually lies within copied space — blockFor() performs no validity
// check, so an arbitrary pointer would read a bogus block header.
inline bool CopiedSpace::isPinned(void* ptr)
{
    return blockFor(ptr)->m_isPinned;
}
171
172inline CopiedBlock* CopiedSpace::blockFor(void* ptr)
173{
174 return reinterpret_cast<CopiedBlock*>(reinterpret_cast<size_t>(ptr) & s_blockMask);
175}
176
177} // namespace JSC
178
179#endif // CopiedSpaceInlines_h
180