/*
 * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef DFGJITCompiler_h
#define DFGJITCompiler_h

#if ENABLE(DFG_JIT)

#include "CCallHelpers.h"
#include "CodeBlock.h"
#include "DFGDisassembler.h"
#include "DFGGraph.h"
#include "DFGInlineCacheWrapper.h"
#include "DFGJITCode.h"
#include "DFGOSRExitCompilationInfo.h"
#include "DFGRegisterBank.h"
#include "FPRInfo.h"
#include "GPRInfo.h"
#include "JITCode.h"
#include "JITInlineCacheGenerator.h"
#include "LinkBuffer.h"
#include "MacroAssembler.h"
#include "TempRegisterSet.h"

namespace JSC {

class AbstractSamplingCounter;
class CodeBlock;
class VM;

namespace DFG {

class JITCodeGenerator;
class NodeToRegisterMap;
class OSRExitJumpPlaceholder;
class SlowPathGenerator;
class SpeculativeJIT;
class SpeculationRecovery;

struct EntryLocation;
struct OSRExit;

// === CallLinkRecord ===
//
// A record of a call out from JIT code that needs linking to a helper function.
// Every CallLinkRecord contains a reference to the call instruction & the function
// that it needs to be linked to.
struct CallLinkRecord {
    CallLinkRecord(MacroAssembler::Call call, FunctionPtr function)
        : m_call(call)
        , m_function(function)
    {
    }

    MacroAssembler::Call m_call;
    FunctionPtr m_function;
};
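
// Illustrative sketch (not part of the original header): once code generation is
// complete and a LinkBuffer has been created, a linking pass can resolve each
// record by binding the recorded call to its helper function, roughly like this
// (linkBuffer is assumed to be the LinkBuffer for the compiled code):
//
//     for (unsigned i = 0; i < m_calls.size(); ++i)
//         linkBuffer.link(m_calls[i].m_call, m_calls[i].m_function);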

struct InRecord {
    InRecord(
        MacroAssembler::PatchableJump jump, MacroAssembler::Label done,
        SlowPathGenerator* slowPathGenerator, StructureStubInfo* stubInfo)
        : m_jump(jump)
        , m_done(done)
        , m_slowPathGenerator(slowPathGenerator)
        , m_stubInfo(stubInfo)
    {
    }

    MacroAssembler::PatchableJump m_jump;
    MacroAssembler::Label m_done;
    SlowPathGenerator* m_slowPathGenerator;
    StructureStubInfo* m_stubInfo;
};

// === JITCompiler ===
//
// DFG::JITCompiler is responsible for generating JIT code from the dataflow graph.
// It does so by delegating to the speculative JIT, which generates to a
// MacroAssembler (which the JITCompiler owns through an inheritance relationship).
// The JITCompiler holds references to information required during compilation, and
// also records information used in linking (e.g. a list of all calls to be linked).
class JITCompiler : public CCallHelpers {
public:
    JITCompiler(Graph& dfg);
    ~JITCompiler();

    void compile();
    void compileFunction();

    // Accessors for properties.
    Graph& graph() { return m_graph; }

    // Methods to set labels for the disassembler.
    void setStartOfCode()
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setStartOfCode(labelIgnoringWatchpoints());
    }

    void setForBlockIndex(BlockIndex blockIndex)
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForBlockIndex(blockIndex, labelIgnoringWatchpoints());
    }

    void setForNode(Node* node)
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setForNode(node, labelIgnoringWatchpoints());
    }

    void setEndOfMainPath()
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setEndOfMainPath(labelIgnoringWatchpoints());
    }

    void setEndOfCode()
    {
        if (LIKELY(!m_disassembler))
            return;
        m_disassembler->setEndOfCode(labelIgnoringWatchpoints());
    }

    void emitStoreCodeOrigin(CodeOrigin codeOrigin)
    {
        unsigned index = m_jitCode->common.addCodeOrigin(codeOrigin);
        unsigned locationBits = CallFrame::Location::encodeAsCodeOriginIndex(index);
        store32(TrustedImm32(locationBits), tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
    }

    // Add a call out from JIT code, without an exception check.
    Call appendCall(const FunctionPtr& function)
    {
        Call functionCall = call();
        m_calls.append(CallLinkRecord(functionCall, function));
        return functionCall;
    }
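
    // Illustrative sketch (assumption, not part of the original header): callers
    // that invoke a C++ helper which may throw typically pair appendCall() with
    // exceptionCheck(), roughly as follows (given a JITCompiler& jit and a
    // hypothetical FunctionPtr named operation):
    //
    //     Call call = jit.appendCall(operation); // recorded in m_calls for later linking
    //     jit.exceptionCheck();                  // jump to the handler if an exception is pending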

    void exceptionCheck(Jump jumpToHandler)
    {
        m_exceptionChecks.append(jumpToHandler);
    }

    void exceptionCheck()
    {
        m_exceptionChecks.append(emitExceptionCheck());
    }

    void exceptionCheckWithCallFrameRollback()
    {
        m_exceptionChecksWithCallFrameRollback.append(emitExceptionCheck());
    }

    // Add a fast exception check after a call, by testing whether the call's return value is zero.
    void fastExceptionCheck()
    {
        callExceptionFuzz();
        m_exceptionChecks.append(branchTestPtr(Zero, GPRInfo::returnValueGPR));
    }

    OSRExitCompilationInfo& appendExitInfo(MacroAssembler::JumpList jumpsToFail = MacroAssembler::JumpList())
    {
        OSRExitCompilationInfo info;
        info.m_failureJumps = jumpsToFail;
        m_exitCompilationInfo.append(info);
        return m_exitCompilationInfo.last();
    }
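
    // Illustrative sketch (assumption, not part of the original header): the
    // speculative JIT records a speculation failure by handing its failure jump(s)
    // to appendExitInfo(); those jumps are later bound to an OSR exit stub when the
    // exits are linked. Roughly (valueGPR is a hypothetical register holding the
    // speculated-on value):
    //
    //     Jump fastPathFails = jit.branchTest32(JITCompiler::Zero, valueGPR);
    //     jit.appendExitInfo(fastPathFails); // bound to an OSR exit in linkOSRExits()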

#if USE(JSVALUE32_64)
    void* addressOfDoubleConstant(Node*);
#endif

    void addGetById(const JITGetByIdGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_getByIds.append(InlineCacheWrapper<JITGetByIdGenerator>(gen, slowPath));
    }

    void addPutById(const JITPutByIdGenerator& gen, SlowPathGenerator* slowPath)
    {
        m_putByIds.append(InlineCacheWrapper<JITPutByIdGenerator>(gen, slowPath));
    }

    void addIn(const InRecord& record)
    {
        m_ins.append(record);
    }

    unsigned currentJSCallIndex() const
    {
        return m_jsCalls.size();
    }

    void addJSCall(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo* info)
    {
        m_jsCalls.append(JSCallRecord(fastCall, slowCall, targetToCheck, info));
    }

    void addWeakReference(JSCell* target)
    {
        m_graph.m_plan.weakReferences.addLazily(target);
    }

    void addWeakReferences(const StructureSet& structureSet)
    {
        for (unsigned i = structureSet.size(); i--;)
            addWeakReference(structureSet[i]);
    }

    template<typename T>
    Jump branchWeakPtr(RelationalCondition cond, T left, JSCell* weakPtr)
    {
        Jump result = branchPtr(cond, left, TrustedImmPtr(weakPtr));
        addWeakReference(weakPtr);
        return result;
    }

    template<typename T>
    Jump branchWeakStructure(RelationalCondition cond, T left, Structure* weakStructure)
    {
#if USE(JSVALUE64)
        Jump result = branch32(cond, left, TrustedImm32(weakStructure->id()));
        addWeakReference(weakStructure);
        return result;
#else
        return branchWeakPtr(cond, left, weakStructure);
#endif
    }
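
    // Illustrative sketch (assumption, not part of the original header): a
    // structure check that both compares against a known Structure and keeps it
    // alive weakly could combine branchWeakStructure() with an OSR exit, e.g.
    // (structureAddress and expectedStructure are hypothetical names for the
    // address of the cell's structure field and the speculated Structure*):
    //
    //     Jump wrongStructure = jit.branchWeakStructure(
    //         JITCompiler::NotEqual, structureAddress, expectedStructure);
    //     jit.appendExitInfo(wrongStructure);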

    template<typename T>
    Jump branchStructurePtr(RelationalCondition cond, T left, Structure* structure)
    {
#if USE(JSVALUE64)
        return branch32(cond, left, TrustedImm32(structure->id()));
#else
        return branchPtr(cond, left, TrustedImmPtr(structure));
#endif
    }

    void noticeOSREntry(BasicBlock&, JITCompiler::Label blockHead, LinkBuffer&);

    RefPtr<JITCode> jitCode() { return m_jitCode; }

    Vector<Label>& blockHeads() { return m_blockHeads; }

private:
    friend class OSRExitJumpPlaceholder;

    // Internal implementation to compile.
    void compileEntry();
    void compileBody();
    void link(LinkBuffer&);

    void exitSpeculativeWithOSR(const OSRExit&, SpeculationRecovery*);
    void compileExceptionHandlers();
    void linkOSRExits();
    void disassemble(LinkBuffer&);

    // The dataflow graph currently being compiled.
    Graph& m_graph;

    std::unique_ptr<Disassembler> m_disassembler;

    RefPtr<JITCode> m_jitCode;

    // Vector of calls out from JIT code that need linking, and the jump lists used
    // to route exception checks to the exception handlers.
    Vector<CallLinkRecord> m_calls;
    JumpList m_exceptionChecks;
    JumpList m_exceptionChecksWithCallFrameRollback;

    Vector<Label> m_blockHeads;

    struct JSCallRecord {
        JSCallRecord(Call fastCall, Call slowCall, DataLabelPtr targetToCheck, CallLinkInfo* info)
            : m_fastCall(fastCall)
            , m_slowCall(slowCall)
            , m_targetToCheck(targetToCheck)
            , m_info(info)
        {
        }

        Call m_fastCall;
        Call m_slowCall;
        DataLabelPtr m_targetToCheck;
        CallLinkInfo* m_info;
    };

    Vector<InlineCacheWrapper<JITGetByIdGenerator>, 4> m_getByIds;
    Vector<InlineCacheWrapper<JITPutByIdGenerator>, 4> m_putByIds;
    Vector<InRecord, 4> m_ins;
    Vector<JSCallRecord, 4> m_jsCalls;
    SegmentedVector<OSRExitCompilationInfo, 4> m_exitCompilationInfo;
    Vector<Vector<Label>> m_exitSiteLabels;

    Call m_callArityFixup;
    Label m_arityCheck;
    std::unique_ptr<SpeculativeJIT> m_speculative;
};
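
// Illustrative sketch (assumption, not part of the original header): the DFG plan
// drives this class roughly as follows, choosing compileFunction() for function
// code blocks and compile() otherwise:
//
//     JITCompiler jit(dfg);
//     if (codeBlock->codeType() == FunctionCode)
//         jit.compileFunction();
//     else
//         jit.compile();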

} } // namespace JSC::DFG

#endif
#endif