apple/javascriptcore.git (JavaScriptCore-7600.1.4.16.1): heap/MarkedAllocator.h
#ifndef MarkedAllocator_h
#define MarkedAllocator_h

#include "MarkedBlock.h"
#include <wtf/DoublyLinkedList.h>

namespace JSC {

class Heap;
class MarkedSpace;
class LLIntOffsetsExtractor;

namespace DFG {
class SpeculativeJIT;
}

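// MarkedAllocator services allocations of a single cell size and destructor type
// on behalf of MarkedSpace. It hands out cells from a per-block free list and
// owns the doubly-linked lists of blocks (active and retired) it allocates from.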
class MarkedAllocator {
    friend class LLIntOffsetsExtractor;

public:
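    // Offset of the free-list head within this allocator, so the LLInt and JITs
    // can locate the free list when performing inline allocation.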
    static ptrdiff_t offsetOfFreeListHead();

    MarkedAllocator();
    void lastChanceToFinalize();
    void reset();
    void stopAllocating();
    void resumeAllocating();
    size_t cellSize() { return m_cellSize; }
    MarkedBlock::DestructorType destructorType() { return m_destructorType; }
    void* allocate(size_t);
    Heap* heap() { return m_heap; }
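    // Hands the block that was in use when allocation last stopped back to the
    // caller and forgets it.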
    MarkedBlock* takeLastActiveBlock()
    {
        MarkedBlock* block = m_lastActiveBlock;
        m_lastActiveBlock = 0;
        return block;
    }

    template<typename Functor> void forEachBlock(Functor&);

    void addBlock(MarkedBlock*);
    void removeBlock(MarkedBlock*);
    void init(Heap*, MarkedSpace*, size_t cellSize, MarkedBlock::DestructorType);

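    // Reports whether walking this allocator's blocks ran past the given deadline;
    // used as a heuristic for detecting that the blocks have been paged out.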
    bool isPagedOut(double deadline);

private:
    JS_EXPORT_PRIVATE void* allocateSlowCase(size_t);
    void* tryAllocate(size_t);
    void* tryAllocateHelper(size_t);
    void* tryPopFreeList(size_t);
    MarkedBlock* allocateBlock(size_t);
    ALWAYS_INLINE void doTestCollectionsIfNeeded();

    MarkedBlock::FreeList m_freeList; // Free cells in m_currentBlock.
    MarkedBlock* m_currentBlock; // Block we are currently allocating out of.
    MarkedBlock* m_lastActiveBlock; // Block in use when allocation last stopped.
    MarkedBlock* m_nextBlockToSweep;
    DoublyLinkedList<MarkedBlock> m_blockList;
    DoublyLinkedList<MarkedBlock> m_retiredBlocks; // Blocks taken out of the allocation rotation.
    size_t m_cellSize;
    MarkedBlock::DestructorType m_destructorType;
    Heap* m_heap;
    MarkedSpace* m_markedSpace;
};

inline ptrdiff_t MarkedAllocator::offsetOfFreeListHead()
{
    return OBJECT_OFFSETOF(MarkedAllocator, m_freeList) + OBJECT_OFFSETOF(MarkedBlock::FreeList, head);
}

inline MarkedAllocator::MarkedAllocator()
    : m_currentBlock(0)
    , m_lastActiveBlock(0)
    , m_nextBlockToSweep(0)
    , m_cellSize(0)
    , m_destructorType(MarkedBlock::None)
    , m_heap(0)
    , m_markedSpace(0)
{
}

inline void MarkedAllocator::init(Heap* heap, MarkedSpace* markedSpace, size_t cellSize, MarkedBlock::DestructorType destructorType)
{
    m_heap = heap;
    m_markedSpace = markedSpace;
    m_cellSize = cellSize;
    m_destructorType = destructorType;
}

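// Fast path: pop the next cell off the current free list; when the list is empty,
// fall back to the out-of-line allocateSlowCase(). In debug builds the returned
// cell is scribbled with 0xCD to catch reads of uninitialized memory.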
inline void* MarkedAllocator::allocate(size_t bytes)
{
    MarkedBlock::FreeCell* head = m_freeList.head;
    if (UNLIKELY(!head)) {
        void* result = allocateSlowCase(bytes);
#ifndef NDEBUG
        memset(result, 0xCD, bytes);
#endif
        return result;
    }

    m_freeList.head = head->next;
#ifndef NDEBUG
    memset(head, 0xCD, bytes);
#endif
    return head;
}

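// Called when the mutator must stop allocating (e.g. for a collection): returns
// the remaining free list to the current block, remembers that block as
// m_lastActiveBlock, and clears the allocator's fast-path state.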
inline void MarkedAllocator::stopAllocating()
{
    ASSERT(!m_lastActiveBlock);
    if (!m_currentBlock) {
        ASSERT(!m_freeList.head);
        return;
    }

    m_currentBlock->stopAllocating(m_freeList);
    m_lastActiveBlock = m_currentBlock;
    m_currentBlock = 0;
    m_freeList = MarkedBlock::FreeList();
}

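// Undoes stopAllocating(): reclaims the free list from the last active block and
// makes that block current again.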
inline void MarkedAllocator::resumeAllocating()
{
    if (!m_lastActiveBlock)
        return;

    m_freeList = m_lastActiveBlock->resumeAllocating();
    m_currentBlock = m_lastActiveBlock;
    m_lastActiveBlock = 0;
}

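// Applies the functor to every block this allocator owns, in both the active and
// retired lists. The next pointer is read before invoking the functor, so the
// functor may safely remove the block it is given.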
template <typename Functor> inline void MarkedAllocator::forEachBlock(Functor& functor)
{
    MarkedBlock* next;
    for (MarkedBlock* block = m_blockList.head(); block; block = next) {
        next = block->next();
        functor(block);
    }

    for (MarkedBlock* block = m_retiredBlocks.head(); block; block = next) {
        next = block->next();
        functor(block);
    }
}

} // namespace JSC

#endif