[apple/javascriptcore.git] / jit / JIT.cpp
/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

// This probably does not belong here; adding here for now as a quick Windows build fix.
#if ENABLE(ASSEMBLER) && CPU(X86) && !OS(MAC_OS_X)
#include "MacroAssembler.h"
JSC::MacroAssemblerX86Common::SSE2CheckState JSC::MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
#endif

#include "CodeBlock.h"
#include "CryptographicallyRandomNumber.h"
#include "Interpreter.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "dfg/DFGNode.h" // for DFG_SUCCESS_STATS

using namespace std;

namespace JSC {

void ctiPatchNearCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkNearCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, newCalleeFunction);
}

void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relinkCallerToFunction(returnAddress, newCalleeFunction);
}

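// The constructor only records compilation state; no code is emitted here.
// m_bytecodeOffset starts out as (unsigned)-1 so that ASSERTs can catch any use of it
// outside of an active compilation pass (see the resets at the end of each pass below).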
JIT::JIT(JSGlobalData* globalData, CodeBlock* codeBlock)
    : m_interpreter(globalData->interpreter)
    , m_globalData(globalData)
    , m_codeBlock(codeBlock)
    , m_labels(codeBlock ? codeBlock->instructions().size() : 0)
    , m_propertyAccessCompilationInfo(codeBlock ? codeBlock->numberOfStructureStubInfos() : 0)
    , m_callStructureStubCompilationInfo(codeBlock ? codeBlock->numberOfCallLinkInfos() : 0)
    , m_bytecodeOffset((unsigned)-1)
#if USE(JSVALUE32_64)
    , m_jumpTargetIndex(0)
    , m_mappedBytecodeOffset((unsigned)-1)
    , m_mappedVirtualRegisterIndex((unsigned)-1)
    , m_mappedTag((RegisterID)-1)
    , m_mappedPayload((RegisterID)-1)
#else
    , m_lastResultBytecodeRegister(std::numeric_limits<int>::max())
    , m_jumpTargetsPosition(0)
#endif
#if USE(OS_RANDOMNESS)
    , m_randomGenerator(cryptographicallyRandomNumber())
#else
    , m_randomGenerator(static_cast<unsigned>(randomNumber() * 0xFFFFFFF))
#endif
{
}

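// emitTimeoutCheck() decrements the dedicated timeout counter register and, when it
// reaches zero, calls the cti_timeout_check stub, which checks whether execution should
// be interrupted and returns a fresh counter value. In the JSVALUE32_64 encoding the
// cached last-result value lives in the regT1:regT0 tag/payload pair, so it is saved as
// a stub argument and reloaded afterwards; in the JSVALUE64 encoding the cached result
// is simply discarded via killLastResultRegister().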
#if USE(JSVALUE32_64)
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, TrustedImm32(1), timeoutCheckRegister);
    JITStubCall stubCall(this, cti_timeout_check);
    stubCall.addArgument(regT1, regT0); // save last result registers.
    stubCall.call(timeoutCheckRegister);
    stubCall.getArgument(0, regT1, regT0); // reload last result registers.
    skipTimeout.link(this);
}
#else
void JIT::emitTimeoutCheck()
{
    Jump skipTimeout = branchSub32(NonZero, TrustedImm32(1), timeoutCheckRegister);
    JITStubCall(this, cti_timeout_check).call(timeoutCheckRegister);
    skipTimeout.link(this);

    killLastResultRegister();
}
#endif

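// The macros below drive the per-opcode dispatch in privateCompileMainPass() and
// privateCompileSlowCases(). NEXT_OPCODE advances m_bytecodeOffset past the current
// instruction. DEFINE_BINARY_OP and DEFINE_UNARY_OP handle opcodes that have no
// specialized code generator: they marshal the operand virtual registers into a call to
// the matching cti_* stub and store the stub's result in the destination register. The
// two value representations differ only in how stub arguments are loaded (JSVALUE32_64
// passes the operand index directly; JSVALUE64 loads it through the scratch register regT2).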
#define NEXT_OPCODE(name) \
    m_bytecodeOffset += OPCODE_LENGTH(name); \
    break;

#if USE(JSVALUE32_64)
#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.addArgument(currentInstruction[3].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#else // USE(JSVALUE32_64)

#define DEFINE_BINARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.addArgument(currentInstruction[3].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_UNARY_OP(name) \
    case name: { \
        JITStubCall stubCall(this, cti_##name); \
        stubCall.addArgument(currentInstruction[2].u.operand, regT2); \
        stubCall.call(currentInstruction[1].u.operand); \
        NEXT_OPCODE(name); \
    }
#endif // USE(JSVALUE32_64)

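// DEFINE_OP and DEFINE_SLOWCASE_OP route each opcode to its dedicated code generator:
// emit_op_foo() emits the inline fast path, and emitSlow_op_foo() emits the matching
// out-of-line slow case, consuming the SlowCaseEntry iterator as it goes.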
#define DEFINE_OP(name) \
    case name: { \
        emit_##name(currentInstruction); \
        NEXT_OPCODE(name); \
    }

#define DEFINE_SLOWCASE_OP(name) \
    case name: { \
        emitSlow_##name(currentInstruction, iter); \
        NEXT_OPCODE(name); \
    }

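// First of the three compilation passes. Walks the bytecode in order, records the
// machine-code label for every bytecode offset in m_labels (so jumps can be linked
// later), and emits the inline fast path for each opcode. Any slow cases emitted along
// the way are queued in m_slowCases for privateCompileSlowCases().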
void JIT::privateCompileMainPass()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();
    unsigned instructionCount = m_codeBlock->instructions().size();

    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    for (m_bytecodeOffset = 0; m_bytecodeOffset < instructionCount; ) {
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;
        ASSERT_WITH_MESSAGE(m_interpreter->isOpcode(currentInstruction->u.opcode), "privateCompileMainPass gone bad @ %d", m_bytecodeOffset);

#if ENABLE(OPCODE_SAMPLING)
        if (m_bytecodeOffset > 0) // Avoid the overhead of sampling op_enter twice.
            sampleInstruction(currentInstruction);
#endif

#if USE(JSVALUE64)
        if (atJumpTarget())
            killLastResultRegister();
#endif

        m_labels[m_bytecodeOffset] = label();

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_BINARY_OP(op_del_by_val)
        DEFINE_BINARY_OP(op_in)
        DEFINE_BINARY_OP(op_less)
        DEFINE_BINARY_OP(op_lesseq)
        DEFINE_UNARY_OP(op_is_boolean)
        DEFINE_UNARY_OP(op_is_function)
        DEFINE_UNARY_OP(op_is_number)
        DEFINE_UNARY_OP(op_is_object)
        DEFINE_UNARY_OP(op_is_string)
        DEFINE_UNARY_OP(op_is_undefined)
#if USE(JSVALUE64)
        DEFINE_UNARY_OP(op_negate)
#endif
        DEFINE_UNARY_OP(op_typeof)

        DEFINE_OP(op_add)
        DEFINE_OP(op_bitand)
        DEFINE_OP(op_bitnot)
        DEFINE_OP(op_bitor)
        DEFINE_OP(op_bitxor)
        DEFINE_OP(op_call)
        DEFINE_OP(op_call_eval)
        DEFINE_OP(op_call_varargs)
        DEFINE_OP(op_catch)
        DEFINE_OP(op_construct)
        DEFINE_OP(op_get_callee)
        DEFINE_OP(op_create_this)
        DEFINE_OP(op_convert_this)
        DEFINE_OP(op_convert_this_strict)
        DEFINE_OP(op_init_lazy_reg)
        DEFINE_OP(op_create_arguments)
        DEFINE_OP(op_debug)
        DEFINE_OP(op_del_by_id)
        DEFINE_OP(op_div)
        DEFINE_OP(op_end)
        DEFINE_OP(op_enter)
        DEFINE_OP(op_create_activation)
        DEFINE_OP(op_eq)
        DEFINE_OP(op_eq_null)
        DEFINE_OP(op_get_by_id)
        DEFINE_OP(op_get_arguments_length)
        DEFINE_OP(op_get_by_val)
        DEFINE_OP(op_get_argument_by_val)
        DEFINE_OP(op_get_by_pname)
        DEFINE_OP(op_get_global_var)
        DEFINE_OP(op_get_pnames)
        DEFINE_OP(op_get_scoped_var)
        DEFINE_OP(op_check_has_instance)
        DEFINE_OP(op_instanceof)
        DEFINE_OP(op_jeq_null)
        DEFINE_OP(op_jfalse)
        DEFINE_OP(op_jmp)
        DEFINE_OP(op_jmp_scopes)
        DEFINE_OP(op_jneq_null)
        DEFINE_OP(op_jneq_ptr)
        DEFINE_OP(op_jnless)
        DEFINE_OP(op_jless)
        DEFINE_OP(op_jlesseq)
        DEFINE_OP(op_jnlesseq)
        DEFINE_OP(op_jsr)
        DEFINE_OP(op_jtrue)
        DEFINE_OP(op_load_varargs)
        DEFINE_OP(op_loop)
        DEFINE_OP(op_loop_if_less)
        DEFINE_OP(op_loop_if_lesseq)
        DEFINE_OP(op_loop_if_true)
        DEFINE_OP(op_loop_if_false)
        DEFINE_OP(op_lshift)
        DEFINE_OP(op_method_check)
        DEFINE_OP(op_mod)
        DEFINE_OP(op_mov)
        DEFINE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_OP(op_negate)
#endif
        DEFINE_OP(op_neq)
        DEFINE_OP(op_neq_null)
        DEFINE_OP(op_new_array)
        DEFINE_OP(op_new_array_buffer)
        DEFINE_OP(op_new_func)
        DEFINE_OP(op_new_func_exp)
        DEFINE_OP(op_new_object)
        DEFINE_OP(op_new_regexp)
        DEFINE_OP(op_next_pname)
        DEFINE_OP(op_not)
        DEFINE_OP(op_nstricteq)
        DEFINE_OP(op_pop_scope)
        DEFINE_OP(op_post_dec)
        DEFINE_OP(op_post_inc)
        DEFINE_OP(op_pre_dec)
        DEFINE_OP(op_pre_inc)
        DEFINE_OP(op_profile_did_call)
        DEFINE_OP(op_profile_will_call)
        DEFINE_OP(op_push_new_scope)
        DEFINE_OP(op_push_scope)
        DEFINE_OP(op_put_by_id)
        DEFINE_OP(op_put_by_index)
        DEFINE_OP(op_put_by_val)
        DEFINE_OP(op_put_getter)
        DEFINE_OP(op_put_global_var)
        DEFINE_OP(op_put_scoped_var)
        DEFINE_OP(op_put_setter)
        DEFINE_OP(op_resolve)
        DEFINE_OP(op_resolve_base)
        DEFINE_OP(op_ensure_property_exists)
        DEFINE_OP(op_resolve_global)
        DEFINE_OP(op_resolve_global_dynamic)
        DEFINE_OP(op_resolve_skip)
        DEFINE_OP(op_resolve_with_base)
        DEFINE_OP(op_ret)
        DEFINE_OP(op_call_put_result)
        DEFINE_OP(op_ret_object_or_this)
        DEFINE_OP(op_rshift)
        DEFINE_OP(op_urshift)
        DEFINE_OP(op_sret)
        DEFINE_OP(op_strcat)
        DEFINE_OP(op_stricteq)
        DEFINE_OP(op_sub)
        DEFINE_OP(op_switch_char)
        DEFINE_OP(op_switch_imm)
        DEFINE_OP(op_switch_string)
        DEFINE_OP(op_tear_off_activation)
        DEFINE_OP(op_tear_off_arguments)
        DEFINE_OP(op_throw)
        DEFINE_OP(op_throw_reference_error)
        DEFINE_OP(op_to_jsnumber)
        DEFINE_OP(op_to_primitive)

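        // These specialized property-access opcodes are never emitted by the bytecode
        // generator, so the baseline JIT should never encounter them here.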
        case op_get_array_length:
        case op_get_by_id_chain:
        case op_get_by_id_generic:
        case op_get_by_id_proto:
        case op_get_by_id_proto_list:
        case op_get_by_id_self:
        case op_get_by_id_self_list:
        case op_get_by_id_getter_chain:
        case op_get_by_id_getter_proto:
        case op_get_by_id_getter_proto_list:
        case op_get_by_id_getter_self:
        case op_get_by_id_getter_self_list:
        case op_get_by_id_custom_chain:
        case op_get_by_id_custom_proto:
        case op_get_by_id_custom_proto_list:
        case op_get_by_id_custom_self:
        case op_get_by_id_custom_self_list:
        case op_get_string_length:
        case op_put_by_id_generic:
        case op_put_by_id_replace:
        case op_put_by_id_transition:
            ASSERT_NOT_REACHED();
        }
    }

    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}


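// Second pass: resolve the intra-CodeBlock jumps recorded during the main pass by
// linking each entry in m_jmpTable to the label captured for its target bytecode offset.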
void JIT::privateCompileLinkPass()
{
    unsigned jmpTableCount = m_jmpTable.size();
    for (unsigned i = 0; i < jmpTableCount; ++i)
        m_jmpTable[i].from.linkTo(m_labels[m_jmpTable[i].toBytecodeOffset], this);
    m_jmpTable.clear();
}

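// Third pass: emit the out-of-line slow paths. Each group of SlowCaseEntry records
// produced by one bytecode instruction is linked into the code generated by the
// matching emitSlow_* function, and every slow path ends by jumping back to the fast
// path via emitJumpSlowToHot().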
void JIT::privateCompileSlowCases()
{
    Instruction* instructionsBegin = m_codeBlock->instructions().begin();

    m_propertyAccessInstructionIndex = 0;
    m_globalResolveInfoIndex = 0;
    m_callLinkInfoIndex = 0;

    for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
#if USE(JSVALUE64)
        killLastResultRegister();
#endif

        m_bytecodeOffset = iter->to;
#ifndef NDEBUG
        unsigned firstTo = m_bytecodeOffset;
#endif
        Instruction* currentInstruction = instructionsBegin + m_bytecodeOffset;

        switch (m_interpreter->getOpcodeID(currentInstruction->u.opcode)) {
        DEFINE_SLOWCASE_OP(op_add)
        DEFINE_SLOWCASE_OP(op_bitand)
        DEFINE_SLOWCASE_OP(op_bitnot)
        DEFINE_SLOWCASE_OP(op_bitor)
        DEFINE_SLOWCASE_OP(op_bitxor)
        DEFINE_SLOWCASE_OP(op_call)
        DEFINE_SLOWCASE_OP(op_call_eval)
        DEFINE_SLOWCASE_OP(op_call_varargs)
        DEFINE_SLOWCASE_OP(op_construct)
        DEFINE_SLOWCASE_OP(op_convert_this)
        DEFINE_SLOWCASE_OP(op_convert_this_strict)
        DEFINE_SLOWCASE_OP(op_div)
        DEFINE_SLOWCASE_OP(op_eq)
        DEFINE_SLOWCASE_OP(op_get_by_id)
        DEFINE_SLOWCASE_OP(op_get_arguments_length)
        DEFINE_SLOWCASE_OP(op_get_by_val)
        DEFINE_SLOWCASE_OP(op_get_argument_by_val)
        DEFINE_SLOWCASE_OP(op_get_by_pname)
        DEFINE_SLOWCASE_OP(op_check_has_instance)
        DEFINE_SLOWCASE_OP(op_instanceof)
        DEFINE_SLOWCASE_OP(op_jfalse)
        DEFINE_SLOWCASE_OP(op_jnless)
        DEFINE_SLOWCASE_OP(op_jless)
        DEFINE_SLOWCASE_OP(op_jlesseq)
        DEFINE_SLOWCASE_OP(op_jnlesseq)
        DEFINE_SLOWCASE_OP(op_jtrue)
        DEFINE_SLOWCASE_OP(op_load_varargs)
        DEFINE_SLOWCASE_OP(op_loop_if_less)
        DEFINE_SLOWCASE_OP(op_loop_if_lesseq)
        DEFINE_SLOWCASE_OP(op_loop_if_true)
        DEFINE_SLOWCASE_OP(op_loop_if_false)
        DEFINE_SLOWCASE_OP(op_lshift)
        DEFINE_SLOWCASE_OP(op_method_check)
        DEFINE_SLOWCASE_OP(op_mod)
        DEFINE_SLOWCASE_OP(op_mul)
#if USE(JSVALUE32_64)
        DEFINE_SLOWCASE_OP(op_negate)
#endif
        DEFINE_SLOWCASE_OP(op_neq)
        DEFINE_SLOWCASE_OP(op_not)
        DEFINE_SLOWCASE_OP(op_nstricteq)
        DEFINE_SLOWCASE_OP(op_post_dec)
        DEFINE_SLOWCASE_OP(op_post_inc)
        DEFINE_SLOWCASE_OP(op_pre_dec)
        DEFINE_SLOWCASE_OP(op_pre_inc)
        DEFINE_SLOWCASE_OP(op_put_by_id)
        DEFINE_SLOWCASE_OP(op_put_by_val)
        DEFINE_SLOWCASE_OP(op_resolve_global)
        DEFINE_SLOWCASE_OP(op_resolve_global_dynamic)
        DEFINE_SLOWCASE_OP(op_rshift)
        DEFINE_SLOWCASE_OP(op_urshift)
        DEFINE_SLOWCASE_OP(op_stricteq)
        DEFINE_SLOWCASE_OP(op_sub)
        DEFINE_SLOWCASE_OP(op_to_jsnumber)
        DEFINE_SLOWCASE_OP(op_to_primitive)
        default:
            ASSERT_NOT_REACHED();
        }

        ASSERT_WITH_MESSAGE(iter == m_slowCases.end() || firstTo != iter->to, "Not enough jumps linked in slow case codegen.");
        ASSERT_WITH_MESSAGE(firstTo == (iter - 1)->to, "Too many jumps linked in slow case codegen.");

        emitJumpSlowToHot(jump(), 0);
    }

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    ASSERT(m_propertyAccessInstructionIndex == m_codeBlock->numberOfStructureStubInfos());
#endif
    ASSERT(m_callLinkInfoIndex == m_codeBlock->numberOfCallLinkInfos());

#ifndef NDEBUG
    // Reset this, in order to guard its use with ASSERTs.
    m_bytecodeOffset = (unsigned)-1;
#endif
}

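// Top-level compilation entry point. Emits the function prologue (return-address spill,
// register-file check and arity check for function code), runs the three compilation
// passes, and then uses a LinkBuffer to turn recorded offsets into real addresses:
// switch jump tables, exception handler targets, outgoing stub calls, jsr return
// addresses, and the patch locations recorded for property-access and call linking.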
JITCode JIT::privateCompile(CodePtr* functionEntryArityCheck)
{
    // Just add a little bit of randomness to the codegen
    if (m_randomGenerator.getUint32() & 1)
        nop();

    // Could use a pop_m, but would need to offset the following instruction if so.
    preserveReturnAddressAfterCall(regT2);
    emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);

    Label beginLabel(this);

    sampleCodeBlock(m_codeBlock);
#if ENABLE(OPCODE_SAMPLING)
    sampleInstruction(m_codeBlock->instructions().begin());
#endif

    Jump registerFileCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
#if DFG_SUCCESS_STATS
        static SamplingCounter counter("originalJIT");
        emitCount(counter);
#endif

        // In the case of a fast linked call, we do not set this up in the caller.
        emitPutImmediateToCallFrameHeader(m_codeBlock, RegisterFile::CodeBlock);

        addPtr(Imm32(m_codeBlock->m_numCalleeRegisters * sizeof(Register)), callFrameRegister, regT1);
        registerFileCheck = branchPtr(Below, AbsoluteAddress(m_globalData->interpreter->registerFile().addressOfEnd()), regT1);
    }

    Label functionBody = label();

    privateCompileMainPass();
    privateCompileLinkPass();
    privateCompileSlowCases();

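    // Out-of-line slow path for the register-file (stack) check above, followed by the
    // arity-check entry point. Callers that pass the wrong number of arguments enter at
    // arityCheck, which fixes up the call frame via the cti_op_*_arityCheck stub and
    // then continues at the normal entry point, beginLabel.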
    Label arityCheck;
    if (m_codeBlock->codeType() == FunctionCode) {
        registerFileCheck.link(this);
        m_bytecodeOffset = 0;
        JITStubCall(this, cti_register_file_check).call();
#ifndef NDEBUG
        m_bytecodeOffset = (unsigned)-1; // Reset this, in order to guard its use with ASSERTs.
#endif
        jump(functionBody);

        arityCheck = label();
        preserveReturnAddressAfterCall(regT2);
        emitPutToCallFrameHeader(regT2, RegisterFile::ReturnPC);
        branch32(Equal, regT1, TrustedImm32(m_codeBlock->m_numParameters)).linkTo(beginLabel, this);
        restoreArgumentReference();

        JITStubCall(this, m_codeBlock->m_isConstructor ? cti_op_construct_arityCheck : cti_op_call_arityCheck).call(callFrameRegister);

        jump(beginLabel);
    }

    ASSERT(m_jmpTable.isEmpty());

    LinkBuffer patchBuffer(*m_globalData, this, m_globalData->executableAllocator);

    // Translate vPC offsets into addresses in JIT generated code, for switch tables.
    for (unsigned i = 0; i < m_switches.size(); ++i) {
        SwitchRecord record = m_switches[i];
        unsigned bytecodeOffset = record.bytecodeOffset;

        if (record.type != SwitchRecord::String) {
            ASSERT(record.type == SwitchRecord::Immediate || record.type == SwitchRecord::Character);
            ASSERT(record.jumpTable.simpleJumpTable->branchOffsets.size() == record.jumpTable.simpleJumpTable->ctiOffsets.size());

            record.jumpTable.simpleJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            for (unsigned j = 0; j < record.jumpTable.simpleJumpTable->branchOffsets.size(); ++j) {
                unsigned offset = record.jumpTable.simpleJumpTable->branchOffsets[j];
                record.jumpTable.simpleJumpTable->ctiOffsets[j] = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.simpleJumpTable->ctiDefault;
            }
        } else {
            ASSERT(record.type == SwitchRecord::String);

            record.jumpTable.stringJumpTable->ctiDefault = patchBuffer.locationOf(m_labels[bytecodeOffset + record.defaultOffset]);

            StringJumpTable::StringOffsetTable::iterator end = record.jumpTable.stringJumpTable->offsetTable.end();
            for (StringJumpTable::StringOffsetTable::iterator it = record.jumpTable.stringJumpTable->offsetTable.begin(); it != end; ++it) {
                unsigned offset = it->second.branchOffset;
                it->second.ctiOffset = offset ? patchBuffer.locationOf(m_labels[bytecodeOffset + offset]) : record.jumpTable.stringJumpTable->ctiDefault;
            }
        }
    }

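    // Record, for each exception handler, the machine-code address of its catch target
    // so the unwind machinery can jump straight into the generated code.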
    for (size_t i = 0; i < m_codeBlock->numberOfExceptionHandlers(); ++i) {
        HandlerInfo& handler = m_codeBlock->exceptionHandler(i);
        handler.nativeCode = patchBuffer.locationOf(m_labels[handler.target]);
    }

    for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
        if (iter->to)
            patchBuffer.link(iter->from, FunctionPtr(iter->to));
    }

    if (m_codeBlock->needsCallReturnIndices()) {
        m_codeBlock->callReturnIndexVector().reserveCapacity(m_calls.size());
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter)
            m_codeBlock->callReturnIndexVector().append(CallReturnOffsetToBytecodeOffset(patchBuffer.returnAddressOffset(iter->from), iter->bytecodeOffset));
    }

    // Link absolute addresses for jsr
    for (Vector<JSRInfo>::iterator iter = m_jsrSites.begin(); iter != m_jsrSites.end(); ++iter)
        patchBuffer.patch(iter->storeLocation, patchBuffer.locationOf(iter->target).executableAddress());

#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
    for (unsigned i = 0; i < m_codeBlock->numberOfStructureStubInfos(); ++i) {
        StructureStubInfo& info = m_codeBlock->structureStubInfo(i);
        info.callReturnLocation = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_propertyAccessCompilationInfo[i].hotPathBegin);
    }
#endif
#if ENABLE(JIT_OPTIMIZE_CALL)
    for (unsigned i = 0; i < m_codeBlock->numberOfCallLinkInfos(); ++i) {
        CallLinkInfo& info = m_codeBlock->callLinkInfo(i);
        info.isCall = m_callStructureStubCompilationInfo[i].isCall;
        info.callReturnLocation = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].callReturnLocation);
        info.hotPathBegin = patchBuffer.locationOf(m_callStructureStubCompilationInfo[i].hotPathBegin);
        info.hotPathOther = patchBuffer.locationOfNearCall(m_callStructureStubCompilationInfo[i].hotPathOther);
    }
#endif
    unsigned methodCallCount = m_methodCallCompilationInfo.size();
    m_codeBlock->addMethodCallLinkInfos(methodCallCount);
    for (unsigned i = 0; i < methodCallCount; ++i) {
        MethodCallLinkInfo& info = m_codeBlock->methodCallLinkInfo(i);
        info.cachedStructure.setLocation(patchBuffer.locationOf(m_methodCallCompilationInfo[i].structureToCompare));
        info.callReturnLocation = m_codeBlock->structureStubInfo(m_methodCallCompilationInfo[i].propertyAccessIndex).callReturnLocation;
    }

    if (m_codeBlock->codeType() == FunctionCode && functionEntryArityCheck)
        *functionEntryArityCheck = patchBuffer.locationOf(arityCheck);

    return patchBuffer.finalizeCode();
}

#if ENABLE(JIT_OPTIMIZE_CALL)

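// Called when an unlinked call site is being bound to a JSFunction callee. If the
// argument count matches the callee (or the callee is a host function with no
// CodeBlock), the hot-path call is repatched to jump directly to the callee's compiled
// entry point and the callee is cached in the CallLinkInfo. Either way, the slow-path
// call site is repatched to the virtual-call thunk so that linking is not attempted again.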
void JIT::linkCall(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
{
    RepatchBuffer repatchBuffer(callerCodeBlock);

    // Currently we only link calls with the exact number of arguments.
    // If this is a native call, calleeCodeBlock is null, so the number of parameters is unimportant.
    if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) {
        ASSERT(!callLinkInfo->isLinked());
        callLinkInfo->callee.set(*globalData, callLinkInfo->hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
        repatchBuffer.relink(callLinkInfo->hotPathOther, code);
    }

    // Patch the call so we do not continue to try to link.
    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualCall());
}

void JIT::linkConstruct(JSFunction* callee, CodeBlock* callerCodeBlock, CodeBlock* calleeCodeBlock, JIT::CodePtr code, CallLinkInfo* callLinkInfo, int callerArgCount, JSGlobalData* globalData)
{
    RepatchBuffer repatchBuffer(callerCodeBlock);

    // Currently we only link calls with the exact number of arguments.
    // If this is a native call, calleeCodeBlock is null, so the number of parameters is unimportant.
    if (!calleeCodeBlock || (callerArgCount == calleeCodeBlock->m_numParameters)) {
        ASSERT(!callLinkInfo->isLinked());
        callLinkInfo->callee.set(*globalData, callLinkInfo->hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
        repatchBuffer.relink(callLinkInfo->hotPathOther, code);
    }

    // Patch the call so we do not continue to try to link.
    repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualConstruct());
}
#endif // ENABLE(JIT_OPTIMIZE_CALL)

} // namespace JSC

#endif // ENABLE(JIT)