]>
Commit | Line | Data |
---|---|---|
81345200 | 1 | /* |
ed1e77d3 | 2 | * Copyright (C) 2013, 2015 Apple Inc. All rights reserved. |
81345200 A |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without | |
5 | * modification, are permitted provided that the following conditions | |
6 | * are met: | |
7 | * 1. Redistributions of source code must retain the above copyright | |
8 | * notice, this list of conditions and the following disclaimer. | |
9 | * 2. Redistributions in binary form must reproduce the above copyright | |
10 | * notice, this list of conditions and the following disclaimer in the | |
11 | * documentation and/or other materials provided with the distribution. | |
12 | * | |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' | |
14 | * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, | |
15 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS | |
17 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
18 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
19 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | |
20 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | |
21 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
22 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF | |
23 | * THE POSSIBILITY OF SUCH DAMAGE. | |
24 | */ | |
25 | ||
26 | #include "config.h" | |
27 | #include "BytecodeLivenessAnalysis.h" | |
28 | ||
ed1e77d3 | 29 | #include "BytecodeKills.h" |
81345200 A |
30 | #include "BytecodeLivenessAnalysisInlines.h" |
31 | #include "BytecodeUseDef.h" | |
32 | #include "CodeBlock.h" | |
33 | #include "FullBytecodeLiveness.h" | |
34 | #include "PreciseJumpTargets.h" | |
35 | ||
36 | namespace JSC { | |
37 | ||
// Eagerly computes full liveness for the given CodeBlock: the CFG
// construction and data-flow fixpoint run here, so every query method is
// valid as soon as construction finishes.
BytecodeLivenessAnalysis::BytecodeLivenessAnalysis(CodeBlock* codeBlock)
    : m_codeBlock(codeBlock)
{
    ASSERT(m_codeBlock);
    compute();
}
44 | ||
45 | static bool isValidRegisterForLiveness(CodeBlock* codeBlock, int operand) | |
46 | { | |
47 | if (codeBlock->isConstantRegisterIndex(operand)) | |
48 | return false; | |
49 | ||
50 | VirtualRegister virtualReg(operand); | |
ed1e77d3 | 51 | return virtualReg.isLocal(); |
81345200 A |
52 | } |
53 | ||
81345200 A |
54 | static unsigned getLeaderOffsetForBasicBlock(RefPtr<BytecodeBasicBlock>* basicBlock) |
55 | { | |
56 | return (*basicBlock)->leaderBytecodeOffset(); | |
57 | } | |
58 | ||
59 | static BytecodeBasicBlock* findBasicBlockWithLeaderOffset(Vector<RefPtr<BytecodeBasicBlock> >& basicBlocks, unsigned leaderOffset) | |
60 | { | |
61 | return (*tryBinarySearch<RefPtr<BytecodeBasicBlock>, unsigned>(basicBlocks, basicBlocks.size(), leaderOffset, getLeaderOffsetForBasicBlock)).get(); | |
62 | } | |
63 | ||
64 | static bool blockContainsBytecodeOffset(BytecodeBasicBlock* block, unsigned bytecodeOffset) | |
65 | { | |
66 | unsigned leaderOffset = block->leaderBytecodeOffset(); | |
67 | return bytecodeOffset >= leaderOffset && bytecodeOffset < leaderOffset + block->totalBytecodeLength(); | |
68 | } | |
69 | ||
70 | static BytecodeBasicBlock* findBasicBlockForBytecodeOffset(Vector<RefPtr<BytecodeBasicBlock> >& basicBlocks, unsigned bytecodeOffset) | |
71 | { | |
72 | /* | |
73 | for (unsigned i = 0; i < basicBlocks.size(); i++) { | |
74 | if (blockContainsBytecodeOffset(basicBlocks[i].get(), bytecodeOffset)) | |
75 | return basicBlocks[i].get(); | |
76 | } | |
77 | return 0; | |
78 | */ | |
79 | RefPtr<BytecodeBasicBlock>* basicBlock = approximateBinarySearch<RefPtr<BytecodeBasicBlock>, unsigned>( | |
80 | basicBlocks, basicBlocks.size(), bytecodeOffset, getLeaderOffsetForBasicBlock); | |
81 | // We found the block we were looking for. | |
82 | if (blockContainsBytecodeOffset((*basicBlock).get(), bytecodeOffset)) | |
83 | return (*basicBlock).get(); | |
84 | ||
85 | // Basic block is to the left of the returned block. | |
86 | if (bytecodeOffset < (*basicBlock)->leaderBytecodeOffset()) { | |
87 | ASSERT(basicBlock - 1 >= basicBlocks.data()); | |
88 | ASSERT(blockContainsBytecodeOffset(basicBlock[-1].get(), bytecodeOffset)); | |
89 | return basicBlock[-1].get(); | |
90 | } | |
91 | ||
92 | // Basic block is to the right of the returned block. | |
93 | ASSERT(&basicBlock[1] <= &basicBlocks.last()); | |
94 | ASSERT(blockContainsBytecodeOffset(basicBlock[1].get(), bytecodeOffset)); | |
95 | return basicBlock[1].get(); | |
96 | } | |
97 | ||
ed1e77d3 A |
// Simplified interface to bytecode use/def, which determines defs first and then uses, and includes
// exception handlers in the uses. The use/def functors receive local-variable
// bit indices (VirtualRegister::toLocal()), not raw operands.
template<typename UseFunctor, typename DefFunctor>
static void stepOverInstruction(CodeBlock* codeBlock, Vector<RefPtr<BytecodeBasicBlock>>& basicBlocks, unsigned bytecodeOffset, const UseFunctor& use, const DefFunctor& def)
{
    // This abstractly executes the instruction in reverse. Instructions logically first use operands and
    // then define operands. This logical ordering is necessary for operations that use and def the same
    // operand, like:
    //
    //     op_add loc1, loc1, loc2
    //
    // The use of loc1 happens before the def of loc1. That's a semantic requirement since the add
    // operation cannot travel forward in time to read the value that it will produce after reading that
    // value. Since we are executing in reverse, this means that we must do defs before uses (reverse of
    // uses before defs).
    //
    // Since this is a liveness analysis, this ordering ends up being particularly important: if we did
    // uses before defs, then the add operation above would appear to not have loc1 live, since we'd
    // first add it to the out set (the use), and then we'd remove it (the def).

    computeDefsForBytecodeOffset(
        codeBlock, bytecodeOffset,
        [&] (CodeBlock* codeBlock, Instruction*, OpcodeID, int operand) {
            // Only locals participate in liveness; constants and non-locals are skipped.
            if (isValidRegisterForLiveness(codeBlock, operand))
                def(VirtualRegister(operand).toLocal());
        });

    computeUsesForBytecodeOffset(
        codeBlock, bytecodeOffset,
        [&] (CodeBlock* codeBlock, Instruction*, OpcodeID, int operand) {
            if (isValidRegisterForLiveness(codeBlock, operand))
                use(VirtualRegister(operand).toLocal());
        });

    // If we have an exception handler, we want the live-in variables of the
    // exception handler block to be included in the live-in of this particular bytecode.
    // This is because this instruction may throw, transferring control to the handler.
    if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeOffset)) {
        BytecodeBasicBlock* handlerBlock = findBasicBlockWithLeaderOffset(basicBlocks, handler->target);
        ASSERT(handlerBlock);
        handlerBlock->in().forEachSetBit(use);
    }
}
140 | ||
ed1e77d3 A |
141 | static void stepOverInstruction(CodeBlock* codeBlock, Vector<RefPtr<BytecodeBasicBlock>>& basicBlocks, unsigned bytecodeOffset, FastBitVector& out) |
142 | { | |
143 | stepOverInstruction( | |
144 | codeBlock, basicBlocks, bytecodeOffset, | |
145 | [&] (unsigned bitIndex) { | |
146 | // This is the use functor, so we set the bit. | |
147 | out.set(bitIndex); | |
148 | }, | |
149 | [&] (unsigned bitIndex) { | |
150 | // This is the def functor, so we clear the bit. | |
151 | out.clear(bitIndex); | |
152 | }); | |
153 | } | |
154 | ||
81345200 A |
155 | static void computeLocalLivenessForBytecodeOffset(CodeBlock* codeBlock, BytecodeBasicBlock* block, Vector<RefPtr<BytecodeBasicBlock> >& basicBlocks, unsigned targetOffset, FastBitVector& result) |
156 | { | |
157 | ASSERT(!block->isExitBlock()); | |
158 | ASSERT(!block->isEntryBlock()); | |
159 | ||
160 | FastBitVector out = block->out(); | |
161 | ||
81345200 A |
162 | for (int i = block->bytecodeOffsets().size() - 1; i >= 0; i--) { |
163 | unsigned bytecodeOffset = block->bytecodeOffsets()[i]; | |
164 | if (targetOffset > bytecodeOffset) | |
165 | break; | |
166 | ||
ed1e77d3 | 167 | stepOverInstruction(codeBlock, basicBlocks, bytecodeOffset, out); |
81345200 A |
168 | } |
169 | ||
170 | result.set(out); | |
171 | } | |
172 | ||
173 | static void computeLocalLivenessForBlock(CodeBlock* codeBlock, BytecodeBasicBlock* block, Vector<RefPtr<BytecodeBasicBlock> >& basicBlocks) | |
174 | { | |
175 | if (block->isExitBlock() || block->isEntryBlock()) | |
176 | return; | |
177 | computeLocalLivenessForBytecodeOffset(codeBlock, block, basicBlocks, block->leaderBytecodeOffset(), block->in()); | |
178 | } | |
179 | ||
// Standard backward data-flow liveness fixpoint:
//     out(b) = union of in(s) over each successor s of b
//     in(b)  = result of abstractly executing b in reverse, starting from out(b)
// Iterates until no block's out-set changes.
void BytecodeLivenessAnalysis::runLivenessFixpoint()
{
    UnlinkedCodeBlock* unlinkedCodeBlock = m_codeBlock->unlinkedCodeBlock();
    unsigned numberOfVariables = unlinkedCodeBlock->m_numCalleeRegisters;

    // Size every block's in/out bitvectors to one bit per callee register.
    for (unsigned i = 0; i < m_basicBlocks.size(); i++) {
        BytecodeBasicBlock* block = m_basicBlocks[i].get();
        block->in().resize(numberOfVariables);
        block->out().resize(numberOfVariables);
    }

    bool changed;
    // The last block is the synthetic exit block: nothing is live across it.
    m_basicBlocks.last()->in().clearAll();
    m_basicBlocks.last()->out().clearAll();
    // Scratch vector reused on every iteration to avoid reallocation.
    FastBitVector newOut;
    newOut.resize(m_basicBlocks.last()->out().numBits());
    do {
        changed = false;
        // Walk blocks in reverse (liveness flows backwards). The loop body
        // sees i from size() - 2 down to 0, deliberately skipping the exit
        // block whose sets were cleared above.
        for (unsigned i = m_basicBlocks.size() - 1; i--;) {
            BytecodeBasicBlock* block = m_basicBlocks[i].get();
            newOut.clearAll();
            for (unsigned j = 0; j < block->successors().size(); j++)
                newOut.merge(block->successors()[j]->in());
            bool outDidChange = block->out().setAndCheck(newOut);
            computeLocalLivenessForBlock(m_codeBlock, block, m_basicBlocks);
            changed |= outDidChange;
        }
    } while (changed);
}
209 | ||
ed1e77d3 | 210 | void BytecodeLivenessAnalysis::getLivenessInfoAtBytecodeOffset(unsigned bytecodeOffset, FastBitVector& result) |
81345200 A |
211 | { |
212 | BytecodeBasicBlock* block = findBasicBlockForBytecodeOffset(m_basicBlocks, bytecodeOffset); | |
213 | ASSERT(block); | |
214 | ASSERT(!block->isEntryBlock()); | |
215 | ASSERT(!block->isExitBlock()); | |
216 | result.resize(block->out().numBits()); | |
217 | computeLocalLivenessForBytecodeOffset(m_codeBlock, block, m_basicBlocks, bytecodeOffset, result); | |
218 | } | |
219 | ||
220 | bool BytecodeLivenessAnalysis::operandIsLiveAtBytecodeOffset(int operand, unsigned bytecodeOffset) | |
221 | { | |
ed1e77d3 | 222 | if (operandIsAlwaysLive(operand)) |
81345200 A |
223 | return true; |
224 | FastBitVector result; | |
ed1e77d3 A |
225 | getLivenessInfoAtBytecodeOffset(bytecodeOffset, result); |
226 | return operandThatIsNotAlwaysLiveIsLive(result, operand); | |
81345200 A |
227 | } |
228 | ||
ed1e77d3 | 229 | FastBitVector BytecodeLivenessAnalysis::getLivenessInfoAtBytecodeOffset(unsigned bytecodeOffset) |
81345200 | 230 | { |
ed1e77d3 A |
231 | FastBitVector out; |
232 | getLivenessInfoAtBytecodeOffset(bytecodeOffset, out); | |
233 | return out; | |
81345200 A |
234 | } |
235 | ||
ed1e77d3 | 236 | void BytecodeLivenessAnalysis::computeFullLiveness(FullBytecodeLiveness& result) |
81345200 A |
237 | { |
238 | FastBitVector out; | |
ed1e77d3 A |
239 | |
240 | result.m_map.resize(m_codeBlock->instructions().size()); | |
241 | ||
242 | for (unsigned i = m_basicBlocks.size(); i--;) { | |
243 | BytecodeBasicBlock* block = m_basicBlocks[i].get(); | |
244 | if (block->isEntryBlock() || block->isExitBlock()) | |
245 | continue; | |
246 | ||
247 | out = block->out(); | |
248 | ||
249 | for (unsigned i = block->bytecodeOffsets().size(); i--;) { | |
250 | unsigned bytecodeOffset = block->bytecodeOffsets()[i]; | |
251 | stepOverInstruction(m_codeBlock, m_basicBlocks, bytecodeOffset, out); | |
252 | result.m_map[bytecodeOffset] = out; | |
253 | } | |
254 | } | |
81345200 A |
255 | } |
256 | ||
ed1e77d3 | 257 | void BytecodeLivenessAnalysis::computeKills(BytecodeKills& result) |
81345200 A |
258 | { |
259 | FastBitVector out; | |
81345200 A |
260 | |
261 | result.m_codeBlock = m_codeBlock; | |
ed1e77d3 | 262 | result.m_killSets = std::make_unique<BytecodeKills::KillSet[]>(m_codeBlock->instructions().size()); |
81345200 A |
263 | |
264 | for (unsigned i = m_basicBlocks.size(); i--;) { | |
265 | BytecodeBasicBlock* block = m_basicBlocks[i].get(); | |
266 | if (block->isEntryBlock() || block->isExitBlock()) | |
267 | continue; | |
268 | ||
269 | out = block->out(); | |
81345200 A |
270 | |
271 | for (unsigned i = block->bytecodeOffsets().size(); i--;) { | |
272 | unsigned bytecodeOffset = block->bytecodeOffsets()[i]; | |
ed1e77d3 A |
273 | stepOverInstruction( |
274 | m_codeBlock, m_basicBlocks, bytecodeOffset, | |
275 | [&] (unsigned index) { | |
276 | // This is for uses. | |
277 | if (out.get(index)) | |
278 | return; | |
279 | result.m_killSets[bytecodeOffset].add(index); | |
280 | out.set(index); | |
281 | }, | |
282 | [&] (unsigned index) { | |
283 | // This is for defs. | |
284 | out.clear(index); | |
285 | }); | |
81345200 A |
286 | } |
287 | } | |
288 | } | |
289 | ||
// Debugging aid: prints every basic block with its CFG edges, the live-before
// set of each instruction interleaved with the disassembly, and finally the
// block's live-out set. Enabled via Options::dumpBytecodeLivenessResults().
void BytecodeLivenessAnalysis::dumpResults()
{
    Interpreter* interpreter = m_codeBlock->vm()->interpreter;
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    for (unsigned i = 0; i < m_basicBlocks.size(); i++) {
        BytecodeBasicBlock* block = m_basicBlocks[i].get();
        dataLogF("\nBytecode basic block %u: %p (offset: %u, length: %u)\n", i, block, block->leaderBytecodeOffset(), block->totalBytecodeLength());
        dataLogF("Predecessors: ");
        for (unsigned j = 0; j < block->predecessors().size(); j++) {
            BytecodeBasicBlock* predecessor = block->predecessors()[j];
            dataLogF("%p ", predecessor);
        }
        dataLogF("\n");
        dataLogF("Successors: ");
        for (unsigned j = 0; j < block->successors().size(); j++) {
            BytecodeBasicBlock* successor = block->successors()[j];
            dataLogF("%p ", successor);
        }
        dataLogF("\n");
        // Entry/exit blocks hold no bytecode, so there is nothing more to dump.
        if (block->isEntryBlock()) {
            dataLogF("Entry block %p\n", block);
            continue;
        }
        if (block->isExitBlock()) {
            dataLogF("Exit block: %p\n", block);
            continue;
        }
        // Walk the block's instructions in program order, printing each one's
        // live-before set followed by its disassembly.
        for (unsigned bytecodeOffset = block->leaderBytecodeOffset(); bytecodeOffset < block->leaderBytecodeOffset() + block->totalBytecodeLength();) {
            const Instruction* currentInstruction = &instructionsBegin[bytecodeOffset];

            dataLogF("Live variables: ");
            FastBitVector liveBefore = getLivenessInfoAtBytecodeOffset(bytecodeOffset);
            for (unsigned j = 0; j < liveBefore.numBits(); j++) {
                if (liveBefore.get(j))
                    dataLogF("%u ", j);
            }
            dataLogF("\n");
            m_codeBlock->dumpBytecode(WTF::dataFile(), m_codeBlock->globalObject()->globalExec(), instructionsBegin, currentInstruction);

            // Advance by the decoded opcode's length to reach the next instruction.
            OpcodeID opcodeID = interpreter->getOpcodeID(instructionsBegin[bytecodeOffset].u.opcode);
            unsigned opcodeLength = opcodeLengths[opcodeID];
            bytecodeOffset += opcodeLength;
        }

        // Finally, the liveness at the end of the block.
        dataLogF("Live variables: ");
        FastBitVector liveAfter = block->out();
        for (unsigned j = 0; j < liveAfter.numBits(); j++) {
            if (liveAfter.get(j))
                dataLogF("%u ", j);
        }
        dataLogF("\n");
    }
}
343 | ||
// Builds the bytecode CFG and runs the liveness fixpoint over it. Called from
// the constructor; optionally dumps the results for debugging.
void BytecodeLivenessAnalysis::compute()
{
    computeBytecodeBasicBlocks(m_codeBlock, m_basicBlocks);
    // CFG construction always produces at least the synthetic entry/exit blocks.
    ASSERT(m_basicBlocks.size());
    runLivenessFixpoint();

    if (Options::dumpBytecodeLivenessResults())
        dumpResults();
}
353 | ||
354 | } // namespace JSC |