]>
Commit | Line | Data |
---|---|---|
14957cd0 A |
1 | /* |
2 | * Copyright (C) 1999-2000 Harri Porten (porten@kde.org) | |
3 | * Copyright (C) 2001 Peter Kelly (pmk@post.com) | |
4 | * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2011 Apple Inc. All rights reserved. | |
5 | * | |
6 | * This library is free software; you can redistribute it and/or | |
7 | * modify it under the terms of the GNU Lesser General Public | |
8 | * License as published by the Free Software Foundation; either | |
9 | * version 2 of the License, or (at your option) any later version. | |
10 | * | |
11 | * This library is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
14 | * Lesser General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU Lesser General Public | |
17 | * License along with this library; if not, write to the Free Software | |
18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
19 | * | |
20 | */ | |
21 | ||
22 | #ifndef MarkedBlock_h | |
23 | #define MarkedBlock_h | |
24 | ||
25 | #include <wtf/Bitmap.h> | |
26 | #include <wtf/PageAllocationAligned.h> | |
27 | #include <wtf/StdLibExtras.h> | |
28 | ||
29 | namespace JSC { | |
30 | ||
31 | class Heap; | |
32 | class JSCell; | |
33 | class JSGlobalData; | |
34 | ||
35 | typedef uintptr_t Bits; | |
36 | ||
37 | static const size_t KB = 1024; | |
38 | ||
// A MarkedBlock is one page of the collector's heap: a blockSize-byte,
// blockSize-aligned chunk whose storage begins with this header and is
// otherwise carved into atomSize-byte "atoms". Cells occupy runs of
// m_atomsPerCell consecutive atoms; m_marks records which cells are live.
class MarkedBlock {
public:
    static const size_t atomSize = sizeof(double); // Ensures natural alignment for all built-in types.

    // Allocates an aligned page and constructs a block header for cells of cellSize bytes.
    static MarkedBlock* create(JSGlobalData*, size_t cellSize);
    static void destroy(MarkedBlock*);

    static bool isAtomAligned(const void*);
    static MarkedBlock* blockFor(const void*);
    static size_t firstAtom();

    Heap* heap() const;

    // Intrusive doubly-linked-list hooks; the list itself is managed by the block's owner.
    void setPrev(MarkedBlock*);
    void setNext(MarkedBlock*);
    MarkedBlock* prev() const;
    MarkedBlock* next() const;

    void* allocate();
    void reset();
    void sweep();

    bool isEmpty();

    void clearMarks();
    size_t markCount();

    size_t cellSize();

    size_t size();
    size_t capacity();

    bool contains(const void*);
    size_t atomNumber(const void*);
    bool isMarked(const void*);
    bool testAndSetMarked(const void*);
    void setMarked(const void*);

    // Applies functor to each live (marked) cell in the block.
    template <typename Functor> void forEach(Functor&);

private:
    static const size_t blockSize = 16 * KB;
    static const size_t blockMask = ~(blockSize - 1); // blockSize must be a power of two.

    static const size_t atomMask = ~(atomSize - 1); // atomSize must be a power of two.

    static const size_t atomsPerBlock = blockSize / atomSize;

    typedef char Atom[atomSize];

    MarkedBlock(const PageAllocationAligned&, JSGlobalData*, size_t cellSize);
    Atom* atoms();

    size_t m_nextAtom; // Next allocation position; reset() rewinds it to firstAtom().
    size_t m_endAtom; // This is a fuzzy end. Always test for < m_endAtom.
    size_t m_atomsPerCell; // Number of atoms each cell spans.
    WTF::Bitmap<blockSize / atomSize> m_marks; // One mark bit per atom.
    PageAllocationAligned m_allocation; // Owns the underlying aligned page.
    Heap* m_heap;
    MarkedBlock* m_prev;
    MarkedBlock* m_next;
};
101 | ||
102 | inline size_t MarkedBlock::firstAtom() | |
103 | { | |
104 | return WTF::roundUpToMultipleOf<atomSize>(sizeof(MarkedBlock)) / atomSize; | |
105 | } | |
106 | ||
107 | inline MarkedBlock::Atom* MarkedBlock::atoms() | |
108 | { | |
109 | return reinterpret_cast<Atom*>(this); | |
110 | } | |
111 | ||
112 | inline bool MarkedBlock::isAtomAligned(const void* p) | |
113 | { | |
114 | return !((intptr_t)(p) & ~atomMask); | |
115 | } | |
116 | ||
117 | inline MarkedBlock* MarkedBlock::blockFor(const void* p) | |
118 | { | |
119 | return reinterpret_cast<MarkedBlock*>(reinterpret_cast<uintptr_t>(p) & blockMask); | |
120 | } | |
121 | ||
// Returns the Heap this block belongs to.
inline Heap* MarkedBlock::heap() const
{
    return m_heap;
}
126 | ||
// Linked-list hook: records the previous block; the list is owned elsewhere.
inline void MarkedBlock::setPrev(MarkedBlock* prev)
{
    m_prev = prev;
}
131 | ||
// Linked-list hook: records the next block; the list is owned elsewhere.
inline void MarkedBlock::setNext(MarkedBlock* next)
{
    m_next = next;
}
136 | ||
// Previous block in the owner's intrusive list.
inline MarkedBlock* MarkedBlock::prev() const
{
    return m_prev;
}
141 | ||
// Next block in the owner's intrusive list.
inline MarkedBlock* MarkedBlock::next() const
{
    return m_next;
}
146 | ||
// Rewinds the allocation cursor to the first atom past the header, making
// the whole block available for allocation again.
inline void MarkedBlock::reset()
{
    m_nextAtom = firstAtom();
}
151 | ||
// True if no mark bit is set, i.e. the block holds no live cells.
inline bool MarkedBlock::isEmpty()
{
    return m_marks.isEmpty();
}
156 | ||
// Clears every mark bit, typically at the start of a collection cycle.
inline void MarkedBlock::clearMarks()
{
    m_marks.clearAll();
}
161 | ||
// Number of set mark bits — one per live cell, since marks are set only at
// cell-start atoms.
inline size_t MarkedBlock::markCount()
{
    return m_marks.count();
}
166 | ||
// Size in bytes of each cell in this block (all cells share one size).
inline size_t MarkedBlock::cellSize()
{
    return m_atomsPerCell * atomSize;
}
171 | ||
172 | inline size_t MarkedBlock::size() | |
173 | { | |
174 | return markCount() * cellSize(); | |
175 | } | |
176 | ||
// Total bytes reserved for this block, as recorded by the page allocation.
inline size_t MarkedBlock::capacity()
{
    return m_allocation.size();
}
181 | ||
inline bool MarkedBlock::contains(const void* p)
{
    // Precondition: p is a non-null, atom-aligned pointer into this block.
    ASSERT(p && isAtomAligned(p) && atomNumber(p) < atomsPerBlock);

    // Even though we physically contain p, we only logically contain p if p
    // points to a live cell. (Claiming to contain a dead cell would trick the
    // conservative garbage collector into resurrecting the cell in a zombie state.)
    return isMarked(p);
}
191 | ||
192 | inline size_t MarkedBlock::atomNumber(const void* p) | |
193 | { | |
194 | return (reinterpret_cast<uintptr_t>(p) - reinterpret_cast<uintptr_t>(this)) / atomSize; | |
195 | } | |
196 | ||
// True if the cell starting at p is marked live.
inline bool MarkedBlock::isMarked(const void* p)
{
    return m_marks.get(atomNumber(p));
}
201 | ||
// Sets the mark bit for the cell at p; returns its previous value.
inline bool MarkedBlock::testAndSetMarked(const void* p)
{
    return m_marks.testAndSet(atomNumber(p));
}
206 | ||
// Marks the cell at p as live.
inline void MarkedBlock::setMarked(const void* p)
{
    m_marks.set(atomNumber(p));
}
211 | ||
212 | template <typename Functor> inline void MarkedBlock::forEach(Functor& functor) | |
213 | { | |
214 | for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell) { | |
215 | if (!m_marks.get(i)) | |
216 | continue; | |
217 | functor(reinterpret_cast<JSCell*>(&atoms()[i])); | |
218 | } | |
219 | } | |
220 | ||
221 | } // namespace JSC | |
222 | ||
#endif // MarkedBlock_h