/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
26 #ifndef CopiedSpaceInlineMethods_h
27 #define CopiedSpaceInlineMethods_h
29 #include "CopiedBlock.h"
30 #include "CopiedSpace.h"
32 #include "HeapBlock.h"
33 #include "JSGlobalData.h"
34 #include <wtf/CheckedBoolean.h>
38 inline bool CopiedSpace::contains(void* ptr
, CopiedBlock
*& result
)
40 CopiedBlock
* block
= blockFor(ptr
);
42 return !m_toSpaceFilter
.ruleOut(reinterpret_cast<Bits
>(block
)) && m_toSpaceSet
.contains(block
);
45 inline void CopiedSpace::pin(CopiedBlock
* block
)
47 block
->m_isPinned
= true;
50 inline void CopiedSpace::startedCopying()
52 DoublyLinkedList
<HeapBlock
>* temp
= m_fromSpace
;
53 m_fromSpace
= m_toSpace
;
56 m_toSpaceFilter
.reset();
57 m_allocator
.startedCopying();
59 ASSERT(!m_inCopyingPhase
);
60 ASSERT(!m_numberOfLoanedBlocks
);
61 m_inCopyingPhase
= true;
64 inline void CopiedSpace::recycleBlock(CopiedBlock
* block
)
66 m_heap
->blockAllocator().deallocate(block
);
69 MutexLocker
locker(m_loanedBlocksLock
);
70 ASSERT(m_numberOfLoanedBlocks
> 0);
71 m_numberOfLoanedBlocks
--;
72 if (!m_numberOfLoanedBlocks
)
73 m_loanedBlocksCondition
.signal();
77 inline CheckedBoolean
CopiedSpace::borrowBlock(CopiedBlock
** outBlock
)
79 CopiedBlock
* block
= 0;
80 if (!getFreshBlock(AllocationMustSucceed
, &block
)) {
85 ASSERT(m_inCopyingPhase
);
86 MutexLocker
locker(m_loanedBlocksLock
);
87 m_numberOfLoanedBlocks
++;
89 ASSERT(block
->m_offset
== block
->payload());
94 inline CheckedBoolean
CopiedSpace::addNewBlock()
96 CopiedBlock
* block
= 0;
97 if (!getFreshBlock(AllocationCanFail
, &block
))
100 m_toSpace
->push(block
);
101 m_toSpaceFilter
.add(reinterpret_cast<Bits
>(block
));
102 m_toSpaceSet
.add(block
);
103 m_allocator
.resetCurrentBlock(block
);
107 inline CheckedBoolean
CopiedSpace::allocateNewBlock(CopiedBlock
** outBlock
)
109 PageAllocationAligned allocation
= PageAllocationAligned::allocate(HeapBlock::s_blockSize
, HeapBlock::s_blockSize
, OSAllocator::JSGCHeapPages
);
110 if (!static_cast<bool>(allocation
)) {
115 *outBlock
= new (NotNull
, allocation
.base()) CopiedBlock(allocation
);
119 inline bool CopiedSpace::fitsInBlock(CopiedBlock
* block
, size_t bytes
)
121 return static_cast<char*>(block
->m_offset
) + bytes
< reinterpret_cast<char*>(block
) + block
->capacity() && static_cast<char*>(block
->m_offset
) + bytes
> block
->m_offset
;
124 inline CheckedBoolean
CopiedSpace::tryAllocate(size_t bytes
, void** outPtr
)
126 ASSERT(!m_heap
->globalData()->isInitializingObject());
128 if (isOversize(bytes
) || !m_allocator
.fitsInCurrentBlock(bytes
))
129 return tryAllocateSlowCase(bytes
, outPtr
);
131 *outPtr
= m_allocator
.allocate(bytes
);
136 inline void* CopiedSpace::allocateFromBlock(CopiedBlock
* block
, size_t bytes
)
138 ASSERT(fitsInBlock(block
, bytes
));
139 ASSERT(is8ByteAligned(block
->m_offset
));
141 void* ptr
= block
->m_offset
;
142 ASSERT(block
->m_offset
>= block
->payload() && block
->m_offset
< reinterpret_cast<char*>(block
) + block
->capacity());
143 block
->m_offset
= static_cast<void*>((static_cast<char*>(ptr
) + bytes
));
144 ASSERT(block
->m_offset
>= block
->payload() && block
->m_offset
< reinterpret_cast<char*>(block
) + block
->capacity());
146 ASSERT(is8ByteAligned(ptr
));
150 inline bool CopiedSpace::isOversize(size_t bytes
)
152 return bytes
> s_maxAllocationSize
;
155 inline bool CopiedSpace::isPinned(void* ptr
)
157 return blockFor(ptr
)->m_isPinned
;
160 inline CopiedBlock
* CopiedSpace::oversizeBlockFor(void* ptr
)
162 return reinterpret_cast<CopiedBlock
*>(reinterpret_cast<size_t>(ptr
) & WTF::pageMask());
165 inline CopiedBlock
* CopiedSpace::blockFor(void* ptr
)
167 return reinterpret_cast<CopiedBlock
*>(reinterpret_cast<size_t>(ptr
) & s_blockMask
);