/*
 * Copyright (C) 2008, 2009, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)

#include "JIT.h"

// This probably does not belong here; adding here for now as a quick Windows build fix.
#if ENABLE(ASSEMBLER) && CPU(X86) && !OS(MAC_OS_X)
#include "MacroAssembler.h"
JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
#endif

#include "ArityCheckFailReturnThunks.h"
#include "CodeBlock.h"
#include "DFGCapabilities.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITOperations.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "MaxFrameExtentForSlowPathCall.h"
#include "JSCInlines.h"
#include "ProfilerDatabase.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "SlowPathCall.h"
#include "StackAlignment.h"
#include <wtf/CryptographicallyRandomNumber.h>

using namespace std;

namespace JSC {

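// The three helpers below patch an already-emitted call. Given the return address of
// the original call site, they build a RepatchBuffer for the owning CodeBlock and
// relink that caller to a new trampoline or C function.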
void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction);
}

JIT::JIT(VM* vm, CodeBlock* codeBlock)
    : JSInterfaceJIT(vm, codeBlock)
    , m_interpreter(vm->interpreter)
    , m_labels(codeBlock ? codeBlock->numberOfInstructions() : 0)
    , m_bytecodeOffset((unsigned)-1)
    , m_getByIdIndex(UINT_MAX)
    , m_putByIdIndex(UINT_MAX)
    , m_byValInstructionIndex(UINT_MAX)
    , m_callLinkInfoIndex(UINT_MAX)
    , m_randomGenerator(cryptographicallyRandomNumber())
    , m_canBeOptimized(false)
    , m_shouldEmitProfiling(false)
{
}

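// Emitted at function entry when DFG tier-up is possible. Roughly: bump the CodeBlock's
// JIT execute counter; if the result is still negative (threshold not yet crossed),
// skip ahead. Otherwise call operationOptimize; if it hands back a non-null code
// pointer in returnValueGPR, adopt the stack pointer it supplies in returnValueGPR2 and
// jump into that optimized entry point, else fall through to the baseline code.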
#if ENABLE(DFG_JIT)
void JIT::emitEnterOptimizationCheck()
{
    if (!canBeOptimized())
        return;

    JumpList skipOptimize;

    skipOptimize.append(branchAdd32(Signed, TrustedImm32(Options::executionCounterIncrementForEntry()), AbsoluteAddress(m_codeBlock->addressOfJITExecuteCounter())));
    ASSERT(!m_bytecodeOffset);
    callOperation(operationOptimize, m_bytecodeOffset);
    skipOptimize.append(branchTestPtr(Zero, returnValueGPR));
    move(returnValueGPR2, stackPointerRegister);
    jump(returnValueGPR);
    skipOptimize.link(this);
}
#endif

#define NEXT_OPCODE(name) \
    m_bytecodeOffset += OPCODE_LENGTH(name); \
    break;

#define DEFINE_SLOW_OP(name) \
    case op_##name: { \
        JITSlowPathCall slowPathCall(this, currentInstruction, slow_path_##name); \
        slowPathCall.call(); \
        NEXT_OPCODE(op_##name); \
    }

#define DEFINE_OP(name) \
    case name: { \
        emit_##name(currentInstruction); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_SLOWCASE_OP(name) \
    case name: { \
        emitSlow_##name(currentInstruction, iter); \
        NEXT_OPCODE(name); \
    }

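// For illustration only (this expansion is implied by the macros above, not spelled out
// in the file): DEFINE_OP(op_add) expands to
//
//     case op_add: {
//         emit_op_add(currentInstruction);
//         m_bytecodeOffset += OPCODE_LENGTH(op_add);
//         break;
//     }
//
// so the big switch in privateCompileMainPass() dispatches each bytecode to its emit_*
// helper and then advances m_bytecodeOffset by that opcode's length. DEFINE_SLOWCASE_OP
// does the same for the emitSlow_* helpers, and DEFINE_SLOW_OP routes the opcode
// through a generic JITSlowPathCall to the matching slow_path_* operation.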
void JIT::privateCompileMainPass()
{
    jitAssertTagsInPlace();
    jitAssertArgumentCountSane();

    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    unsigned instructionCount = m_codeBlock->instructions().size();

    m_callLinkInfoIndex = 0;

    for (m_bytecodeOffset = 0; m_bytecodeOffset < instructionCount; ) {
        if (m_disassembler)
            m_disassembler->setForBytecodeMainPath(m_bytecodeOffset, label());
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;
        ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeOffset);

#if ENABLE(OPCODE_SAMPLING)
        if (m_bytecodeOffset > 0) // Avoid the overhead of sampling op_enter twice.
            sampleInstruction(currentInstruction);
#endif

        m_labels[m_bytecodeOffset] = label();

#if ENABLE(JIT_VERBOSE)
        dataLogF("Old JIT emitting code for bc#%u at offset 0x%lx.\n", m_bytecodeOffset, (long)debugOffset());
#endif

        OpcodeID opcodeID = m_interpreter->getOpcodeID(currentInstruction->u.opcode);

        if (m_compilation) {
            add64(
                TrustedImm32(1),
                AbsoluteAddress(m_compilation->executionCounterFor(Profiler::OriginStack(Profiler::Origin(
                    m_compilation->bytecodes(), m_bytecodeOffset)))->address()));
        }

        if (Options::eagerlyUpdateTopCallFrame())
            updateTopCallFrame();

        switch (opcodeID) {
        DEFINE_SLOW_OP(del_by_val)
        DEFINE_SLOW_OP(in)
        DEFINE_SLOW_OP(less)
        DEFINE_SLOW_OP(lesseq)
        DEFINE_SLOW_OP(greater)
        DEFINE_SLOW_OP(greatereq)
        DEFINE_SLOW_OP(is_function)
        DEFINE_SLOW_OP(is_object)
        DEFINE_SLOW_OP(typeof)

        DEFINE_OP(op_touch_entry)
        DEFINE_OP(op_add)
        DEFINE_OP(op_bitand)
        DEFINE_OP(op_bitor)
        DEFINE_OP(op_bitxor)
        DEFINE_OP(op_call)
        DEFINE_OP(op_call_eval)
        DEFINE_OP(op_call_varargs)
        DEFINE_OP(op_construct_varargs)
        DEFINE_OP(op_catch)
        DEFINE_OP(op_construct)
        DEFINE_OP(op_get_callee)
        DEFINE_OP(op_create_this)
        DEFINE_OP(op_to_this)
        DEFINE_OP(op_init_lazy_reg)
        DEFINE_OP(op_create_arguments)
        DEFINE_OP(op_debug)
        DEFINE_OP(op_del_by_id)
        DEFINE_OP(op_div)
        DEFINE_OP(op_end)
        DEFINE_OP(op_enter)
        DEFINE_OP(op_create_activation)
        DEFINE_OP(op_eq)
        DEFINE_OP(op_eq_null)
        case op_get_by_id_out_of_line:
        case op_get_array_length:
        DEFINE_OP(op_get_by_id)
        DEFINE_OP(op_get_arguments_length)
        DEFINE_OP(op_get_by_val)
        DEFINE_OP(op_get_argument_by_val)
        DEFINE_OP(op_get_by_pname)
        DEFINE_OP(op_get_pnames)
        DEFINE_OP(op_check_has_instance)
        DEFINE_OP(op_instanceof)
        DEFINE_OP(op_is_undefined)
        DEFINE_OP(op_is_boolean)
        DEFINE_OP(op_is_number)
        DEFINE_OP(op_is_string)
        DEFINE_OP(op_jeq_null)
        DEFINE_OP(op_jfalse)
        DEFINE_OP(op_jmp)
        DEFINE_OP(op_jneq_null)
        DEFINE_OP(op_jneq_ptr)
        DEFINE_OP(op_jless)
        DEFINE_OP(op_jlesseq)
        DEFINE_OP(op_jgreater)
        DEFINE_OP(op_jgreatereq)
        DEFINE_OP(op_jnless)
        DEFINE_OP(op_jnlesseq)
        DEFINE_OP(op_jngreater)
        DEFINE_OP(op_jngreatereq)
        DEFINE_OP(op_jtrue)
        DEFINE_OP(op_loop_hint)
        DEFINE_OP(op_lshift)
        DEFINE_OP(op_mod)
        DEFINE_OP(op_captured_mov)
        DEFINE_OP(op_mov)
        DEFINE_OP(op_mul)
        DEFINE_OP(op_negate)
        DEFINE_OP(op_neq)
        DEFINE_OP(op_neq_null)
        DEFINE_OP(op_new_array)
        DEFINE_OP(op_new_array_with_size)
        DEFINE_OP(op_new_array_buffer)
        DEFINE_OP(op_new_func)
        DEFINE_OP(op_new_captured_func)
        DEFINE_OP(op_new_func_exp)
        DEFINE_OP(op_new_object)
        DEFINE_OP(op_new_regexp)
        DEFINE_OP(op_next_pname)
        DEFINE_OP(op_not)
        DEFINE_OP(op_nstricteq)
        DEFINE_OP(op_pop_scope)
        DEFINE_OP(op_dec)
        DEFINE_OP(op_inc)
        DEFINE_OP(op_profile_did_call)
        DEFINE_OP(op_profile_will_call)
        DEFINE_OP(op_push_name_scope)
        DEFINE_OP(op_push_with_scope)
        case op_put_by_id_out_of_line:
        case op_put_by_id_transition_direct:
        case op_put_by_id_transition_normal:
        case op_put_by_id_transition_direct_out_of_line:
        case op_put_by_id_transition_normal_out_of_line:
        DEFINE_OP(op_put_by_id)
        DEFINE_OP(op_put_by_index)
        case op_put_by_val_direct:
        DEFINE_OP(op_put_by_val)
        DEFINE_OP(op_put_getter_setter)
        case op_init_global_const_nop:
            NEXT_OPCODE(op_init_global_const_nop);
        DEFINE_OP(op_init_global_const)

        DEFINE_OP(op_ret)
        DEFINE_OP(op_ret_object_or_this)
        DEFINE_OP(op_rshift)
        DEFINE_OP(op_unsigned)
        DEFINE_OP(op_urshift)
        DEFINE_OP(op_strcat)
        DEFINE_OP(op_stricteq)
        DEFINE_OP(op_sub)
        DEFINE_OP(op_switch_char)
        DEFINE_OP(op_switch_imm)
        DEFINE_OP(op_switch_string)
        DEFINE_OP(op_tear_off_activation)
        DEFINE_OP(op_tear_off_arguments)
        DEFINE_OP(op_throw)
        DEFINE_OP(op_throw_static_error)
        DEFINE_OP(op_to_number)
        DEFINE_OP(op_to_primitive)

        DEFINE_OP(op_resolve_scope)
        DEFINE_OP(op_get_from_scope)
        DEFINE_OP(op_put_to_scope)
        default:
            RELEASE_ASSERT_NOT_REACHED();
        }
    }

    RELEASE_ASSERT(m_callLinkInfoIndex == m_callCompilationInfo.size());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}

void JIT::privateCompileLinkPass()
{
    unsigned jmpTableCount = m_jmpTable.size();
    for (unsigned i = 0; i < jmpTableCount; ++i)
        m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeOffset], this);
    m_jmpTable.clear();
}

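// The main pass records, in m_slowCases, the jumps taken when a fast path's assumptions
// fail. This pass walks those entries, groups the ones that belong to the same bytecode,
// emits the out-of-line slow path via the emitSlow_* helpers, and then jumps back to the
// fast path of the following bytecode with emitJumpSlowToHot().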
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();

    m_getByIdIndex = 0;
    m_putByIdIndex = 0;
    m_byValInstructionIndex = 0;
    m_callLinkInfoIndex = 0;

    // Use this to assert that slow-path code associates new profiling sites with existing
    // ValueProfiles rather than creating new ones. This ensures that for a given instruction
    // (say, get_by_id) we get combined statistics for both the fast-path and the slow-path
    // executions of that instruction. Furthermore, if the slow-path code created new
    // ValueProfiles then the ValueProfiles would no longer be sorted by bytecode offset,
    // which would break the invariant necessary to use CodeBlock::valueProfileForBytecodeOffset().
    unsigned numberOfValueProfiles = m_codeBlock->numberOfValueProfiles();

    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
        m_bytecodeOffset = iter->to;

        unsigned firstTo = m_bytecodeOffset;

        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;

        RareCaseProfile* rareCaseProfile = 0;
        if (shouldEmitProfiling())
            rareCaseProfile = m_codeBlock->addRareCaseProfile(m_bytecodeOffset);

#if ENABLE(JIT_VERBOSE)
        dataLogF("Old JIT emitting slow code for bc#%u at offset 0x%lx.\n", m_bytecodeOffset, (long)debugOffset());
#endif

        if (m_disassembler)
            m_disassembler->setForBytecodeSlowPath(m_bytecodeOffset, label());

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_SLOWCASE_OP(op_add)
        DEFINE_SLOWCASE_OP(op_bitand)
        DEFINE_SLOWCASE_OP(op_bitor)
        DEFINE_SLOWCASE_OP(op_bitxor)
        DEFINE_SLOWCASE_OP(op_call)
        DEFINE_SLOWCASE_OP(op_call_eval)
        DEFINE_SLOWCASE_OP(op_call_varargs)
        DEFINE_SLOWCASE_OP(op_construct_varargs)
        DEFINE_SLOWCASE_OP(op_construct)
        DEFINE_SLOWCASE_OP(op_to_this)
        DEFINE_SLOWCASE_OP(op_create_this)
        DEFINE_SLOWCASE_OP(op_captured_mov)
        DEFINE_SLOWCASE_OP(op_div)
        DEFINE_SLOWCASE_OP(op_eq)
        DEFINE_SLOWCASE_OP(op_get_callee)
        case op_get_by_id_out_of_line:
        case op_get_array_length:
        DEFINE_SLOWCASE_OP(op_get_by_id)
        DEFINE_SLOWCASE_OP(op_get_arguments_length)
        DEFINE_SLOWCASE_OP(op_get_by_val)
        DEFINE_SLOWCASE_OP(op_get_argument_by_val)
        DEFINE_SLOWCASE_OP(op_get_by_pname)
        DEFINE_SLOWCASE_OP(op_check_has_instance)
        DEFINE_SLOWCASE_OP(op_instanceof)
        DEFINE_SLOWCASE_OP(op_jfalse)
        DEFINE_SLOWCASE_OP(op_jless)
        DEFINE_SLOWCASE_OP(op_jlesseq)
        DEFINE_SLOWCASE_OP(op_jgreater)
        DEFINE_SLOWCASE_OP(op_jgreatereq)
        DEFINE_SLOWCASE_OP(op_jnless)
        DEFINE_SLOWCASE_OP(op_jnlesseq)
        DEFINE_SLOWCASE_OP(op_jngreater)
        DEFINE_SLOWCASE_OP(op_jngreatereq)
        DEFINE_SLOWCASE_OP(op_jtrue)
        DEFINE_SLOWCASE_OP(op_loop_hint)
        DEFINE_SLOWCASE_OP(op_lshift)
        DEFINE_SLOWCASE_OP(op_mod)
        DEFINE_SLOWCASE_OP(op_mul)
        DEFINE_SLOWCASE_OP(op_negate)
        DEFINE_SLOWCASE_OP(op_neq)
        DEFINE_SLOWCASE_OP(op_new_object)
        DEFINE_SLOWCASE_OP(op_not)
        DEFINE_SLOWCASE_OP(op_nstricteq)
        DEFINE_SLOWCASE_OP(op_dec)
        DEFINE_SLOWCASE_OP(op_inc)
        case op_put_by_id_out_of_line:
        case op_put_by_id_transition_direct:
        case op_put_by_id_transition_normal:
        case op_put_by_id_transition_direct_out_of_line:
        case op_put_by_id_transition_normal_out_of_line:
        DEFINE_SLOWCASE_OP(op_put_by_id)
        case op_put_by_val_direct:
        DEFINE_SLOWCASE_OP(op_put_by_val)
        DEFINE_SLOWCASE_OP(op_rshift)
        DEFINE_SLOWCASE_OP(op_unsigned)
        DEFINE_SLOWCASE_OP(op_urshift)
        DEFINE_SLOWCASE_OP(op_stricteq)
        DEFINE_SLOWCASE_OP(op_sub)
        DEFINE_SLOWCASE_OP(op_to_number)
        DEFINE_SLOWCASE_OP(op_to_primitive)

        DEFINE_SLOWCASE_OP(op_resolve_scope)
        DEFINE_SLOWCASE_OP(op_get_from_scope)
        DEFINE_SLOWCASE_OP(op_put_to_scope)

        default:
            RELEASE_ASSERT_NOT_REACHED();
        }

        RELEASE_ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to, "Not enough jumps linked in slow case codegen.");
        RELEASE_ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        if (shouldEmitProfiling())
            add32(TrustedImm32(1), AbsoluteAddress(&rareCaseProfile->m_counter));

        emitJumpSlowToHot(jump(), 0);
    }

    RELEASE_ASSERT(m_getByIdIndex == m_getByIds.size());
    RELEASE_ASSERT(m_putByIdIndex == m_putByIds.size());
    RELEASE_ASSERT(m_callLinkInfoIndex == m_callCompilationInfo.size());
    RELEASE_ASSERT(numberOfValueProfiles == m_codeBlock->numberOfValueProfiles());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}

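// Top-level driver for a baseline compile. It classifies the CodeBlock's DFG capability
// (which controls profiling and tier-up), emits the prologue and stack check, runs the
// main/link/slow-case passes, emits the arity-check entry point and exception handlers,
// then links everything with a LinkBuffer and installs the resulting code (with and
// without arity check) on the CodeBlock.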
CompilationResult JIT::privateCompile(JITCompilationEffort effort)
{
    DFG::CapabilityLevel level = m_codeBlock->capabilityLevel();
    switch (level) {
    case DFG::CannotCompile:
        m_canBeOptimized = false;
        m_canBeOptimizedOrInlined = false;
        m_shouldEmitProfiling = false;
        break;
    case DFG::CanInline:
        m_canBeOptimized = false;
        m_canBeOptimizedOrInlined = true;
        m_shouldEmitProfiling = true;
        break;
    case DFG::CanCompile:
    case DFG::CanCompileAndInline:
        m_canBeOptimized = true;
        m_canBeOptimizedOrInlined = true;
        m_shouldEmitProfiling = true;
        break;
    default:
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }

    switch (m_codeBlock->codeType()) {
    case GlobalCode:
    case EvalCode:
        m_codeBlock->m_shouldAlwaysBeInlined = false;
        break;
    case FunctionCode:
        // We could have already set it to false because we detected an uninlineable call.
        // Don't override that observation.
        m_codeBlock->m_shouldAlwaysBeInlined &= canInline(level) && DFG::mightInlineFunction(m_codeBlock);
        break;
    }

    if (Options::showDisassembly() || m_vm->m_perBytecodeProfiler)
        m_disassembler = adoptPtr(new JITDisassembler(m_codeBlock));
    if (m_vm->m_perBytecodeProfiler) {
        m_compilation = adoptRef(
            new Profiler::Compilation(
                m_vm->m_perBytecodeProfiler->ensureBytecodesFor(m_codeBlock),
                Profiler::Baseline));
        m_compilation->addProfiledBytecodes(*m_vm->m_perBytecodeProfiler, m_codeBlock);
    }

    if (m_disassembler)
        m_disassembler->setStartOfCode(label());

    // Just add a little bit of randomness to the codegen
    if (m_randomGenerator.getUint32() & 1)
        nop();

    emitFunctionPrologue();
    emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);

    Label beginLabel(this);

    sampleCodeBlock(m_codeBlock);
#if ENABLE(OPCODE_SAMPLING)
    sampleInstruction(m_codeBlock->instructions().begin());
#endif

    Jump stackOverflow;
    if (m_codeBlock->codeType() == FunctionCode) {
        ASSERT(m_bytecodeOffset == (unsigned)-1);
        if (shouldEmitProfiling()) {
            for (int argument = 0; argument < m_codeBlock->numParameters(); ++argument) {
                // If this is a constructor, then we want to put in a dummy profiling site (to
                // keep things consistent) but we don't actually want to record the dummy value.
                if (m_codeBlock->m_isConstructor && !argument)
                    continue;
                int offset = CallFrame::argumentOffsetIncludingThis(argument) * static_cast<int>(sizeof(Register));
#if USE(JSVALUE64)
                load64(Address(callFrameRegister, offset), regT0);
#elif USE(JSVALUE32_64)
                load32(Address(callFrameRegister, offset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
                load32(Address(callFrameRegister, offset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
                emitValueProfilingSite(m_codeBlock->valueProfileForArgument(argument));
            }
        }

        addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, regT1);
        stackOverflow = branchPtr(Above, AbsoluteAddress(m_vm->addressOfStackLimit()), regT1);
    }

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

    if (m_disassembler)
        m_disassembler->setEndOfSlowPath(label());

    Label arityCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        stackOverflow.link(this);
        m_bytecodeOffset = 0;
        if (maxFrameExtentForSlowPathCall)
            addPtr(TrustedImm32(-maxFrameExtentForSlowPathCall), stackPointerRegister);
        callOperationWithCallFrameRollbackOnException(operationThrowStackOverflowError, m_codeBlock);

        arityCheck = label();
        store8(TrustedImm32(0), &m_codeBlock->m_shouldAlwaysBeInlined);
        emitFunctionPrologue();
        emitPutImmediateToCallFrameHeader(m_codeBlock, JSStack::CodeBlock);

        load32(payloadFor(JSStack::ArgumentCount), regT1);
        branch32(AboveOrEqual, regT1, TrustedImm32(m_codeBlock->m_numParameters)).linkTo(beginLabel, this);

        m_bytecodeOffset = 0;

        if (maxFrameExtentForSlowPathCall)
            addPtr(TrustedImm32(-maxFrameExtentForSlowPathCall), stackPointerRegister);
        callOperationWithCallFrameRollbackOnException(m_codeBlock->m_isConstructor ? operationConstructArityCheck : operationCallArityCheck);
        if (maxFrameExtentForSlowPathCall)
            addPtr(TrustedImm32(maxFrameExtentForSlowPathCall), stackPointerRegister);
        if (returnValueGPR != regT0)
            move(returnValueGPR, regT0);
        branchTest32(Zero, regT0).linkTo(beginLabel, this);
        GPRReg thunkReg;
#if USE(JSVALUE64)
        thunkReg = GPRInfo::regT7;
#else
        thunkReg = GPRInfo::regT5;
#endif
        move(TrustedImmPtr(m_vm->arityCheckFailReturnThunks->returnPCsFor(*m_vm, m_codeBlock->numParameters())), thunkReg);
        loadPtr(BaseIndex(thunkReg, regT0, timesPtr()), thunkReg);
        emitNakedCall(m_vm->getCTIStub(arityFixup).code());

#if !ASSERT_DISABLED
        m_bytecodeOffset = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
#endif

        jump(beginLabel);
    }

    ASSERT(m_jmpTable.isEmpty());

    privateCompileExceptionHandlers();

    if (m_disassembler)
        m_disassembler->setEndOfCode(label());

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock, effort);
    if (patchBuffer.didFailToAllocate())
        return CompilationFailed;

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (unsigned i = 0; i < m_switches.size(); ++i) {
        SwitchRecord record = m_switches[i];
        unsigned bytecodeOffset = record.bytecodeOffset;

        if (record.type != SwitchRecord::String) {
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());

            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
            }
        } else {
            ASSERT(record.type == SwitchRecord::String);

            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
                unsigned offset = it->value.branchOffset;
                it->value.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
            }
        }
    }

    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
    }

    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
        if (iter->to)
            patchBuffer.link(iter->from, FunctionPtr(iter->to));
    }

    for (unsigned i = m_getByIds.size(); i--;)
        m_getByIds[i].finalize(patchBuffer);
    for (unsigned i = m_putByIds.size(); i--;)
        m_putByIds[i].finalize(patchBuffer);

    m_codeBlock->setNumberOfByValInfos(m_byValCompilationInfo.size());
    for (unsigned i = 0; i < m_byValCompilationInfo.size(); ++i) {
        CodeLocationJump badTypeJump = CodeLocationJump(patchBuffer.locationOf(m_byValCompilationInfo[i].badTypeJump));
        CodeLocationLabel doneTarget = patchBuffer.locationOf(m_byValCompilationInfo[i].doneTarget);
        CodeLocationLabel slowPathTarget = patchBuffer.locationOf(m_byValCompilationInfo[i].slowPathTarget);
        CodeLocationCall returnAddress = patchBuffer.locationOf(m_byValCompilationInfo[i].returnAddress);

        m_codeBlock->byValInfo(i) = ByValInfo(
            m_byValCompilationInfo[i].bytecodeIndex,
            badTypeJump,
            m_byValCompilationInfo[i].arrayMode,
            differenceBetweenCodePtr(badTypeJump, doneTarget),
            differenceBetweenCodePtr(returnAddress, slowPathTarget));
    }
    for (unsigned i = 0; i < m_callCompilationInfo.size(); ++i) {
        CallCompilationInfo& compilationInfo = m_callCompilationInfo[i];
        CallLinkInfo& info = *compilationInfo.callLinkInfo;
        info.callReturnLocation = patchBuffer.locationOfNearCall(compilationInfo.callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(compilationInfo.hotPathBegin);
        info.hotPathOther = patchBuffer.locationOfNearCall(compilationInfo.hotPathOther);
    }

    CompactJITCodeMap::Encoder jitCodeMapEncoder;
    for (unsigned bytecodeOffset = 0; bytecodeOffset < m_labels.size(); ++bytecodeOffset) {
        if (m_labels[bytecodeOffset].isSet())
            jitCodeMapEncoder.append(bytecodeOffset, patchBuffer.offsetOf(m_labels[bytecodeOffset]));
    }
    m_codeBlock->setJITCodeMap(jitCodeMapEncoder.finish());

    MacroAssemblerCodePtr withArityCheck;
    if (m_codeBlock->codeType() == FunctionCode)
        withArityCheck = patchBuffer.locationOf(arityCheck);

    if (Options::showDisassembly())
        m_disassembler->dump(patchBuffer);
    if (m_compilation) {
        m_disassembler->reportToProfiler(m_compilation.get(), patchBuffer);
        m_vm->m_perBytecodeProfiler->addCompilation(m_compilation);
    }

    CodeRef result = patchBuffer.finalizeCodeWithoutDisassembly();

    m_vm->machineCodeBytesPerBytecodeWordForBaselineJIT.add(
        static_cast<double>(result.size()) /
        static_cast<double>(m_codeBlock->instructions().size()));

    m_codeBlock->shrinkToFit(CodeBlock::LateShrink);
    m_codeBlock->setJITCode(
        adoptRef(new DirectJITCode(result, withArityCheck, JITCode::BaselineJIT)));

#if ENABLE(JIT_VERBOSE)
    dataLogF("JIT generated code for %p at [%p, %p).\n", m_codeBlock, result.executableMemory()->start(), result.executableMemory()->end());
#endif

    return CompilationSuccessful;
}

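// Emits the shared out-of-line exception path. All exception checks recorded during
// codegen are linked here; the call-frame-rollback variants first recover the caller
// frame, then both variants load the VM* and CallFrame* arguments, call
// lookupExceptionHandler, and jump to whatever handler it selects.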
void JIT::privateCompileExceptionHandlers()
{
    if (m_exceptionChecks.empty() && m_exceptionChecksWithCallFrameRollback.empty())
        return;

    Jump doLookup;

    if (!m_exceptionChecksWithCallFrameRollback.empty()) {
        m_exceptionChecksWithCallFrameRollback.link(this);
        emitGetCallerFrameFromCallFrameHeaderPtr(GPRInfo::argumentGPR1);
        doLookup = jump();
    }

    if (!m_exceptionChecks.empty())
        m_exceptionChecks.link(this);

    // lookupExceptionHandler is passed two arguments, the VM and the exec (the CallFrame*).
    move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR1);

    if (doLookup.isSet())
        doLookup.link(this);

    move(TrustedImmPtr(vm()), GPRInfo::argumentGPR0);

#if CPU(X86)
    // FIXME: should use the call abstraction, but this is currently in the SpeculativeJIT layer!
    poke(GPRInfo::argumentGPR0);
    poke(GPRInfo::argumentGPR1, 1);
#endif
    m_calls.append(CallRecord(call(), (unsigned)-1, FunctionPtr(lookupExceptionHandler).value()));
    jumpToExceptionHandler();
}

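// Frame sizing helpers. frameRegisterCountFor() returns the number of register slots the
// baseline frame reserves (the CodeBlock's callee registers plus room for slow path
// calls, rounded for frame-pointer alignment); stackPointerOffsetFor() converts that to
// the virtual-register offset of the last reserved local. Callers such as
// privateCompile() above multiply the returned offset by sizeof(Register) to position
// the stack pointer relative to the call frame.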
unsigned JIT::frameRegisterCountFor(CodeBlock* codeBlock)
{
    ASSERT(static_cast<unsigned>(codeBlock->m_numCalleeRegisters) == WTF::roundUpToMultipleOf(stackAlignmentRegisters(), static_cast<unsigned>(codeBlock->m_numCalleeRegisters)));

    return roundLocalRegisterCountForFramePointerOffset(codeBlock->m_numCalleeRegisters + maxFrameExtentForSlowPathCallInRegisters);
}

int JIT::stackPointerOffsetFor(CodeBlock* codeBlock)
{
    return virtualRegisterForLocal(frameRegisterCountFor(codeBlock) - 1).offset();
}

} // namespace JSC

#endif // ENABLE(JIT)