/*
 * Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef DFGJITCompiler_h
#define DFGJITCompiler_h

#if ENABLE(DFG_JIT)

#include "CodeBlock.h"
#include "DFGCCallHelpers.h"
#include "DFGDisassembler.h"
#include "DFGFPRInfo.h"
#include "DFGGPRInfo.h"
#include "DFGGraph.h"
#include "DFGOSRExitCompilationInfo.h"
#include "DFGRegisterBank.h"
#include "DFGRegisterSet.h"
#include "JITCode.h"
#include "LinkBuffer.h"
#include "MacroAssembler.h"

namespace JSC {

class AbstractSamplingCounter;
class CodeBlock;
class VM;

namespace DFG {

class JITCodeGenerator;
class NodeToRegisterMap;
class OSRExitJumpPlaceholder;
class SlowPathGenerator;
class SpeculativeJIT;
class SpeculationRecovery;

struct EntryLocation;
struct OSRExit;

// === CallLinkRecord ===
//
// A record of a call out from JIT code that needs linking to a helper function.
// Every CallLinkRecord contains a reference to the call instruction & the function
// that it needs to be linked to.
struct CallLinkRecord {
    CallLinkRecord(MacroAssembler::Call call, FunctionPtr function)
        : m_call(call)
        , m_function(function)
    {
    }

    MacroAssembler::Call m_call;
    FunctionPtr m_function;
};
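
// A hedged sketch of how these records are typically consumed at link time;
// the loop below is illustrative rather than a quote of JITCompiler::link():
//
//     for (unsigned i = 0; i < m_calls.size(); ++i)
//         linkBuffer.link(m_calls[i].m_call, m_calls[i].m_function);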

class CallBeginToken {
public:
    CallBeginToken()
#if !ASSERT_DISABLED
        : m_registered(false)
        , m_exceptionCheckIndex(std::numeric_limits<unsigned>::max())
#endif
    {
    }

    ~CallBeginToken()
    {
        ASSERT(m_registered || !m_codeOrigin.isSet());
        ASSERT(m_codeOrigin.isSet() == (m_exceptionCheckIndex != std::numeric_limits<unsigned>::max()));
    }

    void set(CodeOrigin codeOrigin, unsigned index)
    {
#if !ASSERT_DISABLED
        ASSERT(m_registered || !m_codeOrigin.isSet());
        ASSERT(m_codeOrigin.isSet() == (m_exceptionCheckIndex != std::numeric_limits<unsigned>::max()));
        m_codeOrigin = codeOrigin;
        m_registered = false;
        m_exceptionCheckIndex = index;
#else
        UNUSED_PARAM(codeOrigin);
        UNUSED_PARAM(index);
#endif
    }

    void registerWithExceptionCheck(CodeOrigin codeOrigin, unsigned index)
    {
#if !ASSERT_DISABLED
        ASSERT(m_codeOrigin == codeOrigin);
        if (m_registered)
            return;
        ASSERT(m_exceptionCheckIndex == index);
        m_registered = true;
#else
        UNUSED_PARAM(codeOrigin);
        UNUSED_PARAM(index);
#endif
    }

#if !ASSERT_DISABLED
    const CodeOrigin& codeOrigin() const
    {
        return m_codeOrigin;
    }
#endif

private:
#if !ASSERT_DISABLED
    CodeOrigin m_codeOrigin;
    bool m_registered;
    unsigned m_exceptionCheckIndex;
#endif
};
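
// Note that in release builds CallBeginToken carries no state at all: every
// member is guarded by !ASSERT_DISABLED. The class exists so that debug builds
// can assert that each beginCall() is paired with exception checks carrying
// the same CodeOrigin and exception-check index.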

// === CallExceptionRecord ===
//
// A record of a call out from JIT code that might throw an exception.
// Calls that might throw an exception also record the Jump taken on exception
// (unset if not present) and code origin used to recover handler/source info.
struct CallExceptionRecord {
    CallExceptionRecord(MacroAssembler::Call call, CodeOrigin codeOrigin)
        : m_call(call)
        , m_codeOrigin(codeOrigin)
    {
    }

    CallExceptionRecord(MacroAssembler::Call call, MacroAssembler::Jump exceptionCheck, CodeOrigin codeOrigin)
        : m_call(call)
        , m_exceptionCheck(exceptionCheck)
        , m_codeOrigin(codeOrigin)
    {
    }

    MacroAssembler::Call m_call;
    MacroAssembler::Jump m_exceptionCheck;
    CodeOrigin m_codeOrigin;
};

struct PropertyAccessRecord {
    enum RegisterMode { RegistersFlushed, RegistersInUse };

#if USE(JSVALUE64)
    PropertyAccessRecord(
        CodeOrigin codeOrigin,
        MacroAssembler::DataLabelPtr structureImm,
        MacroAssembler::PatchableJump structureCheck,
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
        MacroAssembler::DataLabelCompact loadOrStore,
        SlowPathGenerator* slowPathGenerator,
        MacroAssembler::Label done,
        int8_t baseGPR,
        int8_t valueGPR,
        const RegisterSet& usedRegisters,
        RegisterMode registerMode = RegistersInUse)
#elif USE(JSVALUE32_64)
    PropertyAccessRecord(
        CodeOrigin codeOrigin,
        MacroAssembler::DataLabelPtr structureImm,
        MacroAssembler::PatchableJump structureCheck,
        MacroAssembler::ConvertibleLoadLabel propertyStorageLoad,
        MacroAssembler::DataLabelCompact tagLoadOrStore,
        MacroAssembler::DataLabelCompact payloadLoadOrStore,
        SlowPathGenerator* slowPathGenerator,
        MacroAssembler::Label done,
        int8_t baseGPR,
        int8_t valueTagGPR,
        int8_t valueGPR,
        const RegisterSet& usedRegisters,
        RegisterMode registerMode = RegistersInUse)
#endif
        : m_codeOrigin(codeOrigin)
        , m_structureImm(structureImm)
        , m_structureCheck(structureCheck)
        , m_propertyStorageLoad(propertyStorageLoad)
#if USE(JSVALUE64)
        , m_loadOrStore(loadOrStore)
#elif USE(JSVALUE32_64)
        , m_tagLoadOrStore(tagLoadOrStore)
        , m_payloadLoadOrStore(payloadLoadOrStore)
#endif
        , m_slowPathGenerator(slowPathGenerator)
        , m_done(done)
        , m_baseGPR(baseGPR)
#if USE(JSVALUE32_64)
        , m_valueTagGPR(valueTagGPR)
#endif
        , m_valueGPR(valueGPR)
        , m_usedRegisters(usedRegisters)
        , m_registerMode(registerMode)
    {
    }

    CodeOrigin m_codeOrigin;
    MacroAssembler::DataLabelPtr m_structureImm;
    MacroAssembler::PatchableJump m_structureCheck;
    MacroAssembler::ConvertibleLoadLabel m_propertyStorageLoad;
#if USE(JSVALUE64)
    MacroAssembler::DataLabelCompact m_loadOrStore;
#elif USE(JSVALUE32_64)
    MacroAssembler::DataLabelCompact m_tagLoadOrStore;
    MacroAssembler::DataLabelCompact m_payloadLoadOrStore;
#endif
    SlowPathGenerator* m_slowPathGenerator;
    MacroAssembler::Label m_done;
    int8_t m_baseGPR;
#if USE(JSVALUE32_64)
    int8_t m_valueTagGPR;
#endif
    int8_t m_valueGPR;
    RegisterSet m_usedRegisters;
    RegisterMode m_registerMode;
};
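
// Each PropertyAccessRecord is consumed at link time, where its labels and
// register assignments seed the CodeBlock's structure stub information so the
// corresponding get/put inline cache can be repatched later. (A descriptive
// note on intent, not a quote of the link() implementation.)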

// === JITCompiler ===
//
// DFG::JITCompiler is responsible for generating JIT code from the dataflow graph.
// It does so by delegating to the speculative & non-speculative JITs, which
// generate to a MacroAssembler (which the JITCompiler owns through an inheritance
// relationship). The JITCompiler holds references to information required during
// compilation, and also records information used in linking (e.g. a list of all
// calls to be linked).
class JITCompiler : public CCallHelpers {
public:
    JITCompiler(Graph& dfg);

    bool compile(JITCode& entry);
    bool compileFunction(JITCode& entry, MacroAssemblerCodePtr& entryWithArityCheck);

    // Accessors for properties.
    Graph& graph() { return m_graph; }

    // Methods to set labels for the disassembler.
    void setStartOfCode()
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setStartOfCode(labelIgnoringWatchpoints());
    }

    void setForBlock(BlockIndex blockIndex)
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForBlock(blockIndex, labelIgnoringWatchpoints());
    }

    void setForNode(Node* node)
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForNode(node, labelIgnoringWatchpoints());
    }

    void setEndOfMainPath()
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setEndOfMainPath(labelIgnoringWatchpoints());
    }

    void setEndOfCode()
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setEndOfCode(labelIgnoringWatchpoints());
    }

    unsigned currentCodeOriginIndex() const
    {
        return m_currentCodeOriginIndex;
    }

    // Get a token for beginning a call, and set the current code origin index in
    // the call frame. For each beginCall() there must be at least one exception
    // check, and all of the exception checks must have the same CodeOrigin as the
    // beginCall().
    void beginCall(CodeOrigin codeOrigin, CallBeginToken& token)
    {
        unsigned index = m_exceptionChecks.size();
        store32(TrustedImm32(index), tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
        token.set(codeOrigin, index);
    }
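
    // A minimal sketch of the expected pairing (illustrative only; 'operation'
    // stands for any C helper, and 'jit' for the current JITCompiler):
    //
    //     CallBeginToken token;
    //     jit.beginCall(codeOrigin, token);
    //     Call call = jit.appendCall(operation);
    //     jit.addExceptionCheck(call, codeOrigin, token);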

    // Notify the JIT of a call that does not require linking.
    void notifyCall(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
    {
        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
        m_exceptionChecks.append(CallExceptionRecord(functionCall, codeOrigin));
    }

    // Add a call out from JIT code, without an exception check.
    Call appendCall(const FunctionPtr& function)
    {
        Call functionCall = call();
        m_calls.append(CallLinkRecord(functionCall, function));
        return functionCall;
    }

    void prepareForExceptionCheck()
    {
        move(TrustedImm32(m_exceptionChecks.size()), GPRInfo::nonPreservedNonReturnGPR);
    }

    // Add a call out from JIT code, with an exception check.
    void addExceptionCheck(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
    {
        prepareForExceptionCheck();
        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
        m_exceptionChecks.append(CallExceptionRecord(functionCall, emitExceptionCheck(), codeOrigin));
    }

    // Add a call out from JIT code, with a fast exception check that tests if the return value is zero.
    void addFastExceptionCheck(Call functionCall, CodeOrigin codeOrigin, CallBeginToken& token)
    {
        prepareForExceptionCheck();
        Jump exceptionCheck = branchTestPtr(Zero, GPRInfo::returnValueGPR);
        token.registerWithExceptionCheck(codeOrigin, m_exceptionChecks.size());
        m_exceptionChecks.append(CallExceptionRecord(functionCall, exceptionCheck, codeOrigin));
    }
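
    // Usage note (an informal summary of the variants above): notifyCall()
    // records the call without emitting an in-line branch, addExceptionCheck()
    // emits the generic VM exception test via emitExceptionCheck(), and
    // addFastExceptionCheck() instead branches when the return value is zero,
    // which is cheaper for helpers that signal failure through their result.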

    void appendExitInfo(MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList())
    {
        OSRExitCompilationInfo info;
        info.m_failureJumps = jumpsToFail;
        m_exitCompilationInfo.append(info);
    }

#if USE(JSVALUE32_64)
    void* addressOfDoubleConstant(Node* node)
    {
        ASSERT(m_graph.isNumberConstant(node));
        unsigned constantIndex = node->constantNumber();
        return &(codeBlock()->constantRegister(FirstConstantRegisterIndex + constantIndex));
    }
#endif

    void addPropertyAccess(const PropertyAccessRecord& record)
    {
        m_propertyAccesses.append(record);
    }

    void addJSCall(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo::CallType callType, GPRReg callee, CodeOrigin codeOrigin)
    {
        m_jsCalls.append(JSCallRecord(fastCall, slowCall, targetToCheck, callType, callee, codeOrigin));
    }

    void addWeakReference(JSCell* target)
    {
        m_codeBlock->appendWeakReference(target);
    }

    void addWeakReferences(const StructureSet& structureSet)
    {
        for (unsigned i = structureSet.size(); i--;)
            addWeakReference(structureSet[i]);
    }

    void addWeakReferenceTransition(JSCell* codeOrigin, JSCell* from, JSCell* to)
    {
        m_codeBlock->appendWeakReferenceTransition(codeOrigin, from, to);
    }

    template<typename T>
    Jump branchWeakPtr(RelationalCondition cond, T left, JSCell* weakPtr)
    {
        Jump result = branchPtr(cond, left, TrustedImmPtr(weakPtr));
        addWeakReference(weakPtr);
        return result;
    }
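
    // A minimal sketch of branchWeakPtr() in use (the register name and check
    // shown are assumptions for illustration, not code from this repository):
    //
    //     Jump wrongStructure = jit.branchWeakPtr(
    //         NotEqual,
    //         Address(baseGPR, JSCell::structureOffset()),
    //         expectedStructure);
    //
    // The branch compares against the raw cell pointer, and the recorded weak
    // reference lets the GC jettison this code if the cell is ever collected.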

    void noticeOSREntry(BasicBlock& basicBlock, JITCompiler::Label blockHead, LinkBuffer& linkBuffer)
    {
#if DFG_ENABLE(OSR_ENTRY)
        // OSR entry is not allowed into blocks deemed unreachable by control flow analysis.
        if (!basicBlock.cfaHasVisited)
            return;

        OSREntryData* entry = codeBlock()->appendDFGOSREntryData(basicBlock.bytecodeBegin, linkBuffer.offsetOf(blockHead));

        entry->m_expectedValues = basicBlock.valuesAtHead;

        // Fix the expected values: in our protocol, a dead variable will have an expected
        // value of (None, []). But the old JIT may stash some values there. So we really
        // need (Top, TOP).
        for (size_t argument = 0; argument < basicBlock.variablesAtHead.numberOfArguments(); ++argument) {
            Node* node = basicBlock.variablesAtHead.argument(argument);
            if (!node || !node->shouldGenerate())
                entry->m_expectedValues.argument(argument).makeTop();
        }
        for (size_t local = 0; local < basicBlock.variablesAtHead.numberOfLocals(); ++local) {
            Node* node = basicBlock.variablesAtHead.local(local);
            if (!node || !node->shouldGenerate())
                entry->m_expectedValues.local(local).makeTop();
            else if (node->variableAccessData()->shouldUseDoubleFormat())
                entry->m_localsForcedDouble.set(local);
        }
#else
        UNUSED_PARAM(basicBlock);
        UNUSED_PARAM(blockHead);
        UNUSED_PARAM(linkBuffer);
#endif
    }
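
    // Illustrative scenario for the fix-up above: with two locals where local 1
    // is dead at the block head, local 0 keeps the value CFA computed while
    // local 1 is widened to Top, so whatever the baseline JIT happened to leave
    // in that stack slot is accepted when entry is attempted.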

private:
    friend class OSRExitJumpPlaceholder;

    // Internal implementation to compile.
    void compileEntry();
    void compileBody(SpeculativeJIT&);
    void link(LinkBuffer&);

    void exitSpeculativeWithOSR(const OSRExit&, SpeculationRecovery*);
    void compileExceptionHandlers();
    void linkOSRExits();

    // The dataflow graph currently being generated.
    Graph& m_graph;

    OwnPtr<Disassembler> m_disassembler;

    // Vector of calls out from JIT code that need linking, and a vector of the
    // exception checks recorded for calls that might throw.
    Vector<CallLinkRecord> m_calls;
    Vector<CallExceptionRecord> m_exceptionChecks;

    struct JSCallRecord {
        JSCallRecord(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo::CallType callType, GPRReg callee, CodeOrigin codeOrigin)
            : m_fastCall(fastCall)
            , m_slowCall(slowCall)
            , m_targetToCheck(targetToCheck)
            , m_callType(callType)
            , m_callee(callee)
            , m_codeOrigin(codeOrigin)
        {
        }

        Call m_fastCall;
        Call m_slowCall;
        DataLabelPtr m_targetToCheck;
        CallLinkInfo::CallType m_callType;
        GPRReg m_callee;
        CodeOrigin m_codeOrigin;
    };

    Vector<PropertyAccessRecord, 4> m_propertyAccesses;
    Vector<JSCallRecord, 4> m_jsCalls;
    Vector<OSRExitCompilationInfo> m_exitCompilationInfo;
    Vector<Vector<Label> > m_exitSiteLabels;
    unsigned m_currentCodeOriginIndex;
};

} } // namespace JSC::DFG

#endif
#endif