#ifndef MarkedAllocator_h
#define MarkedAllocator_h

#include "MarkedBlock.h"
#include <wtf/DoublyLinkedList.h>

namespace JSC {

class Heap;
class MarkedSpace;
class LLIntOffsetsExtractor;

namespace DFG {
class SpeculativeJIT;
}

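// A MarkedAllocator services a single size class of the marked (garbage
// collected) heap: it owns the MarkedBlocks for that cell size, keeps the
// free list of the block it is currently allocating out of, and hands out
// cells from that free list. offsetOfFreeListHead() exposes the free-list
// head so callers such as the LLInt and JIT can inline the allocation
// fast path.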
class MarkedAllocator {
    friend class LLIntOffsetsExtractor;

public:
    static ptrdiff_t offsetOfFreeListHead();

    MarkedAllocator();
    void lastChanceToFinalize();
    void reset();
    void stopAllocating();
    void resumeAllocating();
    size_t cellSize() { return m_cellSize; }
    bool needsDestruction() { return m_needsDestruction; }
    void* allocate(size_t);
    Heap* heap() { return m_heap; }
    MarkedBlock* takeLastActiveBlock()
    {
        MarkedBlock* block = m_lastActiveBlock;
        m_lastActiveBlock = 0;
        return block;
    }

    template<typename Functor> void forEachBlock(Functor&);

    void addBlock(MarkedBlock*);
    void removeBlock(MarkedBlock*);
    void init(Heap*, MarkedSpace*, size_t cellSize, bool needsDestruction);

    bool isPagedOut(double deadline);

private:
    JS_EXPORT_PRIVATE void* allocateSlowCase(size_t);
    void* tryAllocate(size_t);
    void* tryAllocateHelper(size_t);
    void* tryPopFreeList(size_t);
    MarkedBlock* allocateBlock(size_t);
    ALWAYS_INLINE void doTestCollectionsIfNeeded();

    MarkedBlock::FreeList m_freeList;
    MarkedBlock* m_currentBlock;
    MarkedBlock* m_lastActiveBlock;
    MarkedBlock* m_nextBlockToSweep;
    DoublyLinkedList<MarkedBlock> m_blockList;
    DoublyLinkedList<MarkedBlock> m_retiredBlocks;
    size_t m_cellSize;
    bool m_needsDestruction { false };
    Heap* m_heap;
    MarkedSpace* m_markedSpace;
};
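
// A minimal usage sketch (illustrative only, not part of this header): a
// MarkedSpace is assumed to own one MarkedAllocator per size class and to
// route allocation requests of that size to it, roughly:
//
//     MarkedAllocator allocator;
//     allocator.init(&heap, &markedSpace, /* cellSize */ 64, /* needsDestruction */ false);
//     void* cell = allocator.allocate(64); // pops a cell off the current free list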

inline ptrdiff_t MarkedAllocator::offsetOfFreeListHead()
{
    return OBJECT_OFFSETOF(MarkedAllocator, m_freeList) + OBJECT_OFFSETOF(MarkedBlock::FreeList, head);
}

inline MarkedAllocator::MarkedAllocator()
    : m_currentBlock(0)
    , m_lastActiveBlock(0)
    , m_nextBlockToSweep(0)
    , m_cellSize(0)
    , m_heap(0)
    , m_markedSpace(0)
{
}

inline void MarkedAllocator::init(Heap* heap, MarkedSpace* markedSpace, size_t cellSize, bool needsDestruction)
{
    m_heap = heap;
    m_markedSpace = markedSpace;
    m_cellSize = cellSize;
    m_needsDestruction = needsDestruction;
}

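// Allocation fast path: pop the head cell off the current free list. When the
// free list is empty this falls back to allocateSlowCase(), which is expected
// to refill the free list from another (possibly newly allocated) block.
// Debug builds scribble 0xCD over the returned memory.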
inline void* MarkedAllocator::allocate(size_t bytes)
{
    MarkedBlock::FreeCell* head = m_freeList.head;
    if (UNLIKELY(!head)) {
        void* result = allocateSlowCase(bytes);
#ifndef NDEBUG
        memset(result, 0xCD, bytes);
#endif
        return result;
    }

    m_freeList.head = head->next;
#ifndef NDEBUG
    memset(head, 0xCD, bytes);
#endif
    return head;
}

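// stopAllocating()/resumeAllocating() bracket periods when allocation must
// pause (e.g. while the collector runs): the in-progress free list is handed
// back to the current block, remembered as the last active block, and later
// reclaimed from it.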
inline void MarkedAllocator::stopAllocating()
{
    ASSERT(!m_lastActiveBlock);
    if (!m_currentBlock) {
        ASSERT(!m_freeList.head);
        return;
    }

    m_currentBlock->stopAllocating(m_freeList);
    m_lastActiveBlock = m_currentBlock;
    m_currentBlock = 0;
    m_freeList = MarkedBlock::FreeList();
}

inline void MarkedAllocator::resumeAllocating()
{
    if (!m_lastActiveBlock)
        return;

    m_freeList = m_lastActiveBlock->resumeAllocating();
    m_currentBlock = m_lastActiveBlock;
    m_lastActiveBlock = 0;
}

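// Visits every block owned by this allocator, including retired blocks. The
// next pointer is cached before the functor runs, so the functor may safely
// remove the block it is given from the list.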
template <typename Functor> inline void MarkedAllocator::forEachBlock(Functor& functor)
{
    MarkedBlock* next;
    for (MarkedBlock* block = m_blockList.head(); block; block = next) {
        next = block->next();
        functor(block);
    }

    for (MarkedBlock* block = m_retiredBlocks.head(); block; block = next) {
        next = block->next();
        functor(block);
    }
}

} // namespace JSC

#endif