/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CopiedSpaceInlineMethods_h
#define CopiedSpaceInlineMethods_h

#include "CopiedBlock.h"
#include "CopiedSpace.h"
#include "Heap.h"
#include "HeapBlock.h"
#include "JSGlobalData.h"
#include <wtf/CheckedBoolean.h>

namespace JSC {

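// Reports whether ptr lies in a block currently owned by to-space. The containing
// block is always returned through result; the Bloom filter rules out most misses
// cheaply before the hash-set lookup.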
inline bool CopiedSpace::contains(void* ptr, CopiedBlock*& result)
{
    CopiedBlock* block = blockFor(ptr);
    result = block;
    return !m_toSpaceFilter.ruleOut(reinterpret_cast<Bits>(block)) && m_toSpaceSet.contains(block);
}

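// Pinned blocks survive the copying phase in place; their contents are not evacuated.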
inline void CopiedSpace::pin(CopiedBlock* block)
{
    block->m_isPinned = true;
}

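// Flips the semispaces at the start of a copying cycle: the old to-space becomes
// from-space, the filter and allocator are reset, and the copying phase begins.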
inline void CopiedSpace::startedCopying()
{
    DoublyLinkedList<HeapBlock>* temp = m_fromSpace;
    m_fromSpace = m_toSpace;
    m_toSpace = temp;

    m_toSpaceFilter.reset();
    m_allocator.startedCopying();

    ASSERT(!m_inCopyingPhase);
    ASSERT(!m_numberOfLoanedBlocks);
    m_inCopyingPhase = true;
}

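// Returns a loaned block to the block allocator and signals any thread waiting
// for the last outstanding loan to be repaid.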
inline void CopiedSpace::recycleBlock(CopiedBlock* block)
{
    m_heap->blockAllocator().deallocate(block);

    {
        MutexLocker locker(m_loanedBlocksLock);
        ASSERT(m_numberOfLoanedBlocks > 0);
        m_numberOfLoanedBlocks--;
        if (!m_numberOfLoanedBlocks)
            m_loanedBlocksCondition.signal();
    }
}

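// Loans a fresh, empty block out during the copying phase. The loan is counted
// under m_loanedBlocksLock so the collector can wait for every borrowed block
// to be returned via recycleBlock().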
inline CheckedBoolean CopiedSpace::borrowBlock(CopiedBlock** outBlock)
{
    CopiedBlock* block = 0;
    if (!getFreshBlock(AllocationMustSucceed, &block)) {
        *outBlock = 0;
        return false;
    }

    ASSERT(m_inCopyingPhase);
    MutexLocker locker(m_loanedBlocksLock);
    m_numberOfLoanedBlocks++;

    ASSERT(block->m_offset == block->payload());
    *outBlock = block;
    return true;
}

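// Acquires a fresh block, registers it with the to-space list, filter, and set,
// and makes it the allocator's current block.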
inline CheckedBoolean CopiedSpace::addNewBlock()
{
    CopiedBlock* block = 0;
    if (!getFreshBlock(AllocationCanFail, &block))
        return false;

    m_toSpace->push(block);
    m_toSpaceFilter.add(reinterpret_cast<Bits>(block));
    m_toSpaceSet.add(block);
    m_allocator.resetCurrentBlock(block);
    return true;
}

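// Allocates an aligned page span from the OS and placement-constructs a CopiedBlock
// in it; returns false if the underlying page allocation fails.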
inline CheckedBoolean CopiedSpace::allocateNewBlock(CopiedBlock** outBlock)
{
    PageAllocationAligned allocation = PageAllocationAligned::allocate(HeapBlock::s_blockSize, HeapBlock::s_blockSize, OSAllocator::JSGCHeapPages);
    if (!static_cast<bool>(allocation)) {
        *outBlock = 0;
        return false;
    }

    *outBlock = new (NotNull, allocation.base()) CopiedBlock(allocation);
    return true;
}

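// True if bytes can be bump-allocated from block without running past its capacity;
// the second comparison guards against pointer overflow for very large requests.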
inline bool CopiedSpace::fitsInBlock(CopiedBlock* block, size_t bytes)
{
    return static_cast<char*>(block->m_offset) + bytes < reinterpret_cast<char*>(block) + block->capacity() && static_cast<char*>(block->m_offset) + bytes > block->m_offset;
}

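// Fast-path allocation: oversize requests and requests that do not fit in the
// current block fall through to the slow case; otherwise bump-allocate from the
// allocator's current block.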
inline CheckedBoolean CopiedSpace::tryAllocate(size_t bytes, void** outPtr)
{
    ASSERT(!m_heap->globalData()->isInitializingObject());

    if (isOversize(bytes) || !m_allocator.fitsInCurrentBlock(bytes))
        return tryAllocateSlowCase(bytes, outPtr);

    *outPtr = m_allocator.allocate(bytes);
    ASSERT(*outPtr);
    return true;
}

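// Bump-allocates bytes from block by advancing m_offset; the caller is expected
// to have checked fitsInBlock() first.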
inline void* CopiedSpace::allocateFromBlock(CopiedBlock* block, size_t bytes)
{
    ASSERT(fitsInBlock(block, bytes));
    ASSERT(is8ByteAligned(block->m_offset));

    void* ptr = block->m_offset;
    ASSERT(block->m_offset >= block->payload() && block->m_offset < reinterpret_cast<char*>(block) + block->capacity());
    block->m_offset = static_cast<void*>((static_cast<char*>(ptr) + bytes));
    ASSERT(block->m_offset >= block->payload() && block->m_offset < reinterpret_cast<char*>(block) + block->capacity());

    ASSERT(is8ByteAligned(ptr));
    return ptr;
}

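// Requests larger than s_maxAllocationSize are handled out-of-line in dedicated
// oversize blocks rather than from a shared block.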
inline bool CopiedSpace::isOversize(size_t bytes)
{
    return bytes > s_maxAllocationSize;
}

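// A pointer is pinned if the block containing it has been pinned.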
inline bool CopiedSpace::isPinned(void* ptr)
{
    return blockFor(ptr)->m_isPinned;
}

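// Oversize blocks are page-aligned, so masking with the page mask recovers the
// block header for a pointer into one.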
inline CopiedBlock* CopiedSpace::oversizeBlockFor(void* ptr)
{
    return reinterpret_cast<CopiedBlock*>(reinterpret_cast<size_t>(ptr) & WTF::pageMask());
}

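// Regular blocks are block-size-aligned, so masking with s_blockMask recovers the
// block header for a pointer into one.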
inline CopiedBlock* CopiedSpace::blockFor(void* ptr)
{
    return reinterpret_cast<CopiedBlock*>(reinterpret_cast<size_t>(ptr) & s_blockMask);
}

} // namespace JSC

#endif