1 /*
2 * Copyright (C) 2008, 2009, 2013 Apple Inc. All rights reserved.
3 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
4 * Copyright (C) Research In Motion Limited 2010, 2011. All rights reserved.
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 *
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
15 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
16 * its contributors may be used to endorse or promote products derived
17 * from this software without specific prior written permission.
18 *
19 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
20 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
23 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 */
30
31 #include "config.h"
32
33 #if ENABLE(JIT)
34 #include "JITStubs.h"
35
36 #include "CommonSlowPaths.h"
37 #include "Arguments.h"
38 #include "ArrayConstructor.h"
39 #include "CallFrame.h"
40 #include "CodeBlock.h"
41 #include "CodeProfiling.h"
42 #include "DFGOSREntry.h"
43 #include "Debugger.h"
44 #include "ExceptionHelpers.h"
45 #include "GetterSetter.h"
46 #include "Heap.h"
47 #include <wtf/InlineASM.h>
48 #include "JIT.h"
49 #include "JITExceptions.h"
50 #include "JSActivation.h"
51 #include "JSArray.h"
52 #include "JSFunction.h"
53 #include "JSGlobalObjectFunctions.h"
54 #include "JSNameScope.h"
55 #include "JSNotAnObject.h"
56 #include "JSPropertyNameIterator.h"
57 #include "JSString.h"
58 #include "JSWithScope.h"
59 #include "LegacyProfiler.h"
60 #include "NameInstance.h"
61 #include "ObjectConstructor.h"
62 #include "ObjectPrototype.h"
63 #include "Operations.h"
64 #include "Parser.h"
65 #include "RegExpObject.h"
66 #include "RegExpPrototype.h"
67 #include "Register.h"
68 #include "RepatchBuffer.h"
69 #include "SamplingTool.h"
70 #include "Strong.h"
71 #include "StructureRareDataInlines.h"
72 #include <wtf/StdLibExtras.h>
73 #include <stdarg.h>
74 #include <stdio.h>
75
76 using namespace std;
77
78 namespace JSC {
79
80 #if USE(JSVALUE32_64)
81
82 #if COMPILER(GCC) && CPU(X86)
83
84 // These ASSERTs remind you that, if you change the layout of JITStackFrame, you
85 // need to change the assembly trampolines below to match.
86 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 16 == 0x0, JITStackFrame_maintains_16byte_stack_alignment);
87 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x3c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
88 COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x58, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
89 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x50, JITStackFrame_code_offset_matches_ctiTrampoline);
90
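// ctiTrampoline is the entry point from C++ into JIT-generated code: it saves the
// callee-saved registers, reserves the JITStackFrame area on the stack, loads the
// CallFrame argument into the register the JIT code expects, and then calls the
// supplied code pointer. The epilogue tears the frame back down and returns to C++.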
91 asm (
92 ".text\n"
93 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
94 HIDE_SYMBOL(ctiTrampoline) "\n"
95 SYMBOL_STRING(ctiTrampoline) ":" "\n"
96 "pushl %ebp" "\n"
97 "movl %esp, %ebp" "\n"
98 "pushl %esi" "\n"
99 "pushl %edi" "\n"
100 "pushl %ebx" "\n"
101 "subl $0x3c, %esp" "\n"
102 "movw $0x02FF, %bx" "\n"
103 "movw %bx, 0(%esp)" "\n"
104 "fldcw 0(%esp)" "\n"
105 "movl 0x58(%esp), %edi" "\n"
106 "call *0x50(%esp)" "\n"
107 "addl $0x3c, %esp" "\n"
108 "popl %ebx" "\n"
109 "popl %edi" "\n"
110 "popl %esi" "\n"
111 "popl %ebp" "\n"
112 "ffree %st(1)" "\n"
113 "ret" "\n"
114 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
115 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
116 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
117 );
118
119 asm (
120 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
121 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
122 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
123 "movl %esp, %ecx" "\n"
124 "call " LOCAL_REFERENCE(cti_vm_throw) "\n"
125 "int3" "\n"
126 );
127
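// ctiOpThrowNotCaught unwinds the JITStackFrame and returns out of ctiTrampoline when
// an exception propagates out of the JIT code without being caught.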
128 asm (
129 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
130 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
131 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
132 "addl $0x3c, %esp" "\n"
133 "popl %ebx" "\n"
134 "popl %edi" "\n"
135 "popl %esi" "\n"
136 "popl %ebp" "\n"
137 "ret" "\n"
138 );
139
140 #elif COMPILER(GCC) && CPU(X86_64)
141
142 // These ASSERTs remind you that, if you change the layout of JITStackFrame, you
143 // need to change the assembly trampolines below to match.
144 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 32 == 0x0, JITStackFrame_maintains_32byte_stack_alignment);
145 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x48, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
146 COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x90, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
147 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x80, JITStackFrame_code_offset_matches_ctiTrampoline);
148
149 asm (
150 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
151 HIDE_SYMBOL(ctiTrampoline) "\n"
152 SYMBOL_STRING(ctiTrampoline) ":" "\n"
153 "pushq %rbp" "\n"
154 "movq %rsp, %rbp" "\n"
155 "pushq %r12" "\n"
156 "pushq %r13" "\n"
157 "pushq %r14" "\n"
158 "pushq %r15" "\n"
159 "pushq %rbx" "\n"
160 "subq $0x48, %rsp" "\n"
161 "movq $512, %r12" "\n"
162 "movq $0xFFFF000000000000, %r14" "\n"
163 "movq $0xFFFF000000000002, %r15" "\n"
164 "movq 0x90(%rsp), %r13" "\n"
165 "call *0x80(%rsp)" "\n"
166 "addq $0x48, %rsp" "\n"
167 "popq %rbx" "\n"
168 "popq %r15" "\n"
169 "popq %r14" "\n"
170 "popq %r13" "\n"
171 "popq %r12" "\n"
172 "popq %rbp" "\n"
173 "ret" "\n"
174 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
175 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
176 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
177 );
178
179 asm (
180 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
181 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
182 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
183 "movq %rsp, %rdi" "\n"
184 "call " LOCAL_REFERENCE(cti_vm_throw) "\n"
185 "int3" "\n"
186 );
187
188 asm (
189 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
190 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
191 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
192 "addq $0x48, %rsp" "\n"
193 "popq %rbx" "\n"
194 "popq %r15" "\n"
195 "popq %r14" "\n"
196 "popq %r13" "\n"
197 "popq %r12" "\n"
198 "popq %rbp" "\n"
199 "ret" "\n"
200 );
201
202 #elif (COMPILER(GCC) || COMPILER(RVCT)) && CPU(ARM_THUMB2)
203
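// Stack offsets into the ARM Thumb-2 JITStackFrame. These must mirror the real struct
// layout; they are checked at runtime in performPlatformSpecificJITAssertions() below.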
204 #define THUNK_RETURN_ADDRESS_OFFSET 0x38
205 #define PRESERVED_RETURN_ADDRESS_OFFSET 0x3C
206 #define PRESERVED_R4_OFFSET 0x40
207 #define PRESERVED_R5_OFFSET 0x44
208 #define PRESERVED_R6_OFFSET 0x48
209 #define PRESERVED_R7_OFFSET 0x4C
210 #define PRESERVED_R8_OFFSET 0x50
211 #define PRESERVED_R9_OFFSET 0x54
212 #define PRESERVED_R10_OFFSET 0x58
213 #define PRESERVED_R11_OFFSET 0x5C
214 #define REGISTER_FILE_OFFSET 0x60
215 #define FIRST_STACK_ARGUMENT 0x68
216
217 #elif (COMPILER(GCC) || COMPILER(MSVC) || COMPILER(RVCT)) && CPU(ARM_TRADITIONAL)
218
219 // Also update the MSVC section (defined at DEFINE_STUB_FUNCTION)
220 // when changing one of the following values.
221 #define THUNK_RETURN_ADDRESS_OFFSET 64
222 #define PRESERVEDR4_OFFSET 68
223
224 #elif COMPILER(MSVC) && CPU(X86)
225
226 // These ASSERTs remind you that, if you change the layout of JITStackFrame, you
227 // need to change the assembly trampolines below to match.
228 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 16 == 0x0, JITStackFrame_maintains_16byte_stack_alignment);
229 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedEBX) == 0x3c, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
230 COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x58, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
231 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x50, JITStackFrame_code_offset_matches_ctiTrampoline);
232
233 extern "C" {
234
235 __declspec(naked) EncodedJSValue ctiTrampoline(void* code, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*)
236 {
237 __asm {
238 push ebp;
239 mov ebp, esp;
240 push esi;
241 push edi;
242 push ebx;
243 sub esp, 0x3c;
244 mov ecx, esp;
245 mov edi, [esp + 0x58];
246 call [esp + 0x50];
247 add esp, 0x3c;
248 pop ebx;
249 pop edi;
250 pop esi;
251 pop ebp;
252 ret;
253 }
254 }
255
256 __declspec(naked) void ctiVMThrowTrampoline()
257 {
258 __asm {
259 mov ecx, esp;
260 call cti_vm_throw;
261 add esp, 0x3c;
262 pop ebx;
263 pop edi;
264 pop esi;
265 pop ebp;
266 ret;
267 }
268 }
269
270 __declspec(naked) void ctiOpThrowNotCaught()
271 {
272 __asm {
273 add esp, 0x3c;
274 pop ebx;
275 pop edi;
276 pop esi;
277 pop ebp;
278 ret;
279 }
280 }
281 }
282
283 #elif CPU(MIPS)
284
285 #define PRESERVED_GP_OFFSET 60
286 #define PRESERVED_S0_OFFSET 64
287 #define PRESERVED_S1_OFFSET 68
288 #define PRESERVED_S2_OFFSET 72
289 #define PRESERVED_S3_OFFSET 76
290 #define PRESERVED_S4_OFFSET 80
291 #define PRESERVED_RETURN_ADDRESS_OFFSET 84
292 #define THUNK_RETURN_ADDRESS_OFFSET 88
293 #define REGISTER_FILE_OFFSET 92
294 #define VM_OFFSET 108
295 #define STACK_LENGTH 112
296 #elif CPU(SH4)
297 #define SYMBOL_STRING(name) #name
298 /* code (r4), JSStack* (r5), CallFrame* (r6), void* unused1 (r7), void* unused2 (sp), VM* (sp) */
299
300 asm volatile (
301 ".text\n"
302 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
303 HIDE_SYMBOL(ctiTrampoline) "\n"
304 SYMBOL_STRING(ctiTrampoline) ":" "\n"
305 "mov.l r7, @-r15" "\n"
306 "mov.l r6, @-r15" "\n"
307 "mov.l r5, @-r15" "\n"
308 "mov.l r14, @-r15" "\n"
309 "sts.l pr, @-r15" "\n"
310 "mov.l r13, @-r15" "\n"
311 "mov.l r11, @-r15" "\n"
312 "mov.l r10, @-r15" "\n"
313 "add #-60, r15" "\n"
314 "mov r6, r14" "\n"
315 "jsr @r4" "\n"
316 "nop" "\n"
317 "add #60, r15" "\n"
318 "mov.l @r15+,r10" "\n"
319 "mov.l @r15+,r11" "\n"
320 "mov.l @r15+,r13" "\n"
321 "lds.l @r15+,pr" "\n"
322 "mov.l @r15+,r14" "\n"
323 "add #12, r15" "\n"
324 "rts" "\n"
325 "nop" "\n"
326 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
327 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
328 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
329 );
330
331 asm volatile (
332 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
333 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
334 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
335 "mov.l .L2"SYMBOL_STRING(cti_vm_throw)",r0" "\n"
336 "mov r15, r4" "\n"
337 "mov.l @(r0,r12),r11" "\n"
338 "jsr @r11" "\n"
339 "nop" "\n"
340 "add #60, r15" "\n"
341 "mov.l @r15+,r10" "\n"
342 "mov.l @r15+,r11" "\n"
343 "mov.l @r15+,r13" "\n"
344 "lds.l @r15+,pr" "\n"
345 "mov.l @r15+,r14" "\n"
346 "add #12, r15" "\n"
347 "rts" "\n"
348 "nop" "\n"
349 ".align 2" "\n"
350 ".L2"SYMBOL_STRING(cti_vm_throw)":.long " SYMBOL_STRING(cti_vm_throw)"@GOT \n"
351 );
352
353 asm volatile (
354 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
355 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
356 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
357 "add #60, r15" "\n"
358 "mov.l @r15+,r10" "\n"
359 "mov.l @r15+,r11" "\n"
360 "mov.l @r15+,r13" "\n"
361 "lds.l @r15+,pr" "\n"
362 "mov.l @r15+,r14" "\n"
363 "add #12, r15" "\n"
364 "rts" "\n"
365 "nop" "\n"
366 );
367 #else
368 #error "JIT not supported on this platform."
369 #endif
370
371 #else // USE(JSVALUE32_64)
372
373 #if COMPILER(GCC) && CPU(X86_64) && !OS(WINDOWS)
374
375 // These ASSERTs remind you that, if you change the layout of JITStackFrame, you
376 // need to change the assembly trampolines below to match.
377 COMPILE_ASSERT(offsetof(struct JITStackFrame, callFrame) == 0x58, JITStackFrame_callFrame_offset_matches_ctiTrampoline);
378 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) == 0x48, JITStackFrame_code_offset_matches_ctiTrampoline);
379 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x78, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
380
381 asm (
382 ".text\n"
383 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
384 HIDE_SYMBOL(ctiTrampoline) "\n"
385 SYMBOL_STRING(ctiTrampoline) ":" "\n"
386 "pushq %rbp" "\n"
387 "movq %rsp, %rbp" "\n"
388 "pushq %r12" "\n"
389 "pushq %r13" "\n"
390 "pushq %r14" "\n"
391 "pushq %r15" "\n"
392 "pushq %rbx" "\n"
393 // Form the JIT stubs area
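// (the six System V integer argument registers are spilled here so that they become
// part of the JITStackFrame)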
394 "pushq %r9" "\n"
395 "pushq %r8" "\n"
396 "pushq %rcx" "\n"
397 "pushq %rdx" "\n"
398 "pushq %rsi" "\n"
399 "pushq %rdi" "\n"
400 "subq $0x48, %rsp" "\n"
401 "movq $512, %r12" "\n"
402 "movq $0xFFFF000000000000, %r14" "\n"
403 "movq $0xFFFF000000000002, %r15" "\n"
404 "movq %rdx, %r13" "\n"
405 "call *%rdi" "\n"
406 "addq $0x78, %rsp" "\n"
407 "popq %rbx" "\n"
408 "popq %r15" "\n"
409 "popq %r14" "\n"
410 "popq %r13" "\n"
411 "popq %r12" "\n"
412 "popq %rbp" "\n"
413 "ret" "\n"
414 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
415 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
416 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
417 );
418
419 asm (
420 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
421 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
422 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
423 "movq %rsp, %rdi" "\n"
424 "call " LOCAL_REFERENCE(cti_vm_throw) "\n"
425 "int3" "\n"
426 );
427
428 asm (
429 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
430 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
431 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
432 "addq $0x78, %rsp" "\n"
433 "popq %rbx" "\n"
434 "popq %r15" "\n"
435 "popq %r14" "\n"
436 "popq %r13" "\n"
437 "popq %r12" "\n"
438 "popq %rbp" "\n"
439 "ret" "\n"
440 );
441
442 #elif COMPILER(GCC) && CPU(X86_64) && OS(WINDOWS)
443
444 // These ASSERTs remind you that, if you change the layout of JITStackFrame, you
445 // need to change the assembly trampolines below to match.
446 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 16 == 0x0, JITStackFrame_maintains_16byte_stack_alignment);
447 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x58, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
448
449 asm (
450 ".text\n"
451 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
452 HIDE_SYMBOL(ctiTrampoline) "\n"
453 SYMBOL_STRING(ctiTrampoline) ":" "\n"
454 // Dump register parameters to their home addresses
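// (the Win64 ABI reserves 32 bytes of home space above the return address for the
// first four register parameters)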
455 "movq %r9, 0x20(%rsp)" "\n"
456 "movq %r8, 0x18(%rsp)" "\n"
457 "movq %rdx, 0x10(%rsp)" "\n"
458 "movq %rcx, 0x8(%rsp)" "\n"
459
460 "pushq %rbp" "\n"
461 "movq %rsp, %rbp" "\n"
462 "pushq %r12" "\n"
463 "pushq %r13" "\n"
464 "pushq %r14" "\n"
465 "pushq %r15" "\n"
466 "pushq %rbx" "\n"
467
468 // Decrease rsp to point to the start of our JITStackFrame
469 "subq $0x58, %rsp" "\n"
470 "movq $512, %r12" "\n"
471 "movq $0xFFFF000000000000, %r14" "\n"
472 "movq $0xFFFF000000000002, %r15" "\n"
473 "movq %r8, %r13" "\n"
474 "call *%rcx" "\n"
475 "addq $0x58, %rsp" "\n"
476 "popq %rbx" "\n"
477 "popq %r15" "\n"
478 "popq %r14" "\n"
479 "popq %r13" "\n"
480 "popq %r12" "\n"
481 "popq %rbp" "\n"
482 "ret" "\n"
483 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
484 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
485 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
486 );
487
488 asm (
489 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
490 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
491 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
492 "movq %rsp, %rcx" "\n"
493 "call " LOCAL_REFERENCE(cti_vm_throw) "\n"
494 "int3" "\n"
495 );
496
497 asm (
498 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
499 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
500 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
501 "addq $0x58, %rsp" "\n"
502 "popq %rbx" "\n"
503 "popq %r15" "\n"
504 "popq %r14" "\n"
505 "popq %r13" "\n"
506 "popq %r12" "\n"
507 "popq %rbp" "\n"
508 "ret" "\n"
509 );
510
511 #elif COMPILER(GCC) && CPU(ARM64)
512
513 #define THUNK_RETURN_ADDRESS_OFFSET 0x30
514 #define PRESERVED_RETURN_ADDRESS_OFFSET 0x38
515 #define PRESERVED_X19_OFFSET 0x40
516 #define PRESERVED_X20_OFFSET 0x48
517 #define PRESERVED_X21_OFFSET 0x50
518 #define PRESERVED_X22_OFFSET 0x58
519 #define PRESERVED_X23_OFFSET 0x60
520 #define PRESERVED_X24_OFFSET 0x68
521 #define PRESERVED_X25_OFFSET 0x70
522 #define PRESERVED_X26_OFFSET 0x78
523 #define PRESERVED_X27_OFFSET 0x80
524 #define PRESERVED_X28_OFFSET 0x88
525 #define REGISTER_FILE_OFFSET 0x90
526 #define CALLFRAME_OFFSET 0x98
527 #define PROFILER_REFERENCE_OFFSET 0xa0
528 #define VM_OFFSET 0xa8
529 #define SIZEOF_JITSTACKFRAME 0xb0
530
531 asm (
532 ".section __TEXT,__text,regular,pure_instructions" "\n"
533 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
534 ".align 2" "\n"
535 HIDE_SYMBOL(ctiTrampoline) "\n"
536 SYMBOL_STRING(ctiTrampoline) ":" "\n"
537 "sub sp, sp, #" STRINGIZE_VALUE_OF(SIZEOF_JITSTACKFRAME) "\n"
538 "str lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
539 "str x19, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X19_OFFSET) "]" "\n"
540 "str x20, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X20_OFFSET) "]" "\n"
541 "str x21, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X21_OFFSET) "]" "\n"
542 "str x22, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X22_OFFSET) "]" "\n"
543 "str x23, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X23_OFFSET) "]" "\n"
544 "str x24, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X24_OFFSET) "]" "\n"
545 "str x25, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X25_OFFSET) "]" "\n"
546 "str x26, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X26_OFFSET) "]" "\n"
547 "str x27, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X27_OFFSET) "]" "\n"
548 "str x28, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X28_OFFSET) "]" "\n"
549 "str x1, [sp, #" STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "]" "\n"
550 "str x2, [sp, #" STRINGIZE_VALUE_OF(CALLFRAME_OFFSET) "]" "\n"
551 "str x4, [sp, #" STRINGIZE_VALUE_OF(PROFILER_REFERENCE_OFFSET) "]" "\n"
552 "str x5, [sp, #" STRINGIZE_VALUE_OF(VM_OFFSET) "]" "\n"
553 "mov x25, x2" "\n" // callFrameRegister = ARM64Registers::x25
554 "mov x26, #512" "\n" // timeoutCheckRegister = ARM64Registers::x26
555 "mov x27, #0xFFFF000000000000" "\n" // tagTypeNumberRegister = ARM64Registers::x27
556 "add x28, x27, #2" "\n" // ( #0xFFFF000000000002 ) tagMaskRegister = ARM64Registers::x28
557 "blr x0" "\n"
558 "ldr x28, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X28_OFFSET) "]" "\n"
559 "ldr x27, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X27_OFFSET) "]" "\n"
560 "ldr x26, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X26_OFFSET) "]" "\n"
561 "ldr x25, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X25_OFFSET) "]" "\n"
562 "ldr x24, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X24_OFFSET) "]" "\n"
563 "ldr x23, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X23_OFFSET) "]" "\n"
564 "ldr x22, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X22_OFFSET) "]" "\n"
565 "ldr x21, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X21_OFFSET) "]" "\n"
566 "ldr x20, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X20_OFFSET) "]" "\n"
567 "ldr x19, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X19_OFFSET) "]" "\n"
568 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
569 "add sp, sp, #" STRINGIZE_VALUE_OF(SIZEOF_JITSTACKFRAME) "\n"
570 "ret" "\n"
571
572 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
573 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
574 "ldr x28, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X28_OFFSET) "]" "\n"
575 "ldr x27, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X27_OFFSET) "]" "\n"
576 "ldr x26, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X26_OFFSET) "]" "\n"
577 "ldr x25, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X25_OFFSET) "]" "\n"
578 "ldr x24, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X24_OFFSET) "]" "\n"
579 "ldr x23, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X23_OFFSET) "]" "\n"
580 "ldr x22, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X22_OFFSET) "]" "\n"
581 "ldr x21, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X21_OFFSET) "]" "\n"
582 "ldr x20, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X20_OFFSET) "]" "\n"
583 "ldr x19, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_X19_OFFSET) "]" "\n"
584 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
585 "add sp, sp, #" STRINGIZE_VALUE_OF(SIZEOF_JITSTACKFRAME) "\n"
586 "ret" "\n"
587 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
588 ".align 2" "\n"
589 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
590 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
591 );
592
593 asm (
594 ".section __TEXT,__text,regular,pure_instructions" "\n"
595 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
596 ".align 2" "\n"
597 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
598 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
599 "mov x0, sp" "\n"
600 "bl " LOCAL_REFERENCE(cti_vm_throw) "\n"
601 "hlt 0xdead" "\n" // Should not be reached!
602 );
603
604 #elif COMPILER(MSVC) && CPU(X86_64)
605
606 // These ASSERTs remind you that, if you change the layout of JITStackFrame, you
607 // need to change the assembly trampolines in JITStubsMSVC64.asm to match.
608 COMPILE_ASSERT(offsetof(struct JITStackFrame, code) % 16 == 0x0, JITStackFrame_maintains_16byte_stack_alignment);
609 COMPILE_ASSERT(offsetof(struct JITStackFrame, savedRBX) == 0x58, JITStackFrame_stub_argument_space_matches_ctiTrampoline);
610
611 #else
612 #error "JIT not supported on this platform."
613 #endif
614
615 #endif // USE(JSVALUE32_64)
616
617 #if CPU(MIPS)
618 asm (
619 ".text" "\n"
620 ".align 2" "\n"
621 ".set noreorder" "\n"
622 ".set nomacro" "\n"
623 ".set nomips16" "\n"
624 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
625 ".ent " SYMBOL_STRING(ctiTrampoline) "\n"
626 SYMBOL_STRING(ctiTrampoline) ":" "\n"
627 "addiu $29,$29,-" STRINGIZE_VALUE_OF(STACK_LENGTH) "\n"
628 "sw $31," STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "($29)" "\n"
629 "sw $20," STRINGIZE_VALUE_OF(PRESERVED_S4_OFFSET) "($29)" "\n"
630 "sw $19," STRINGIZE_VALUE_OF(PRESERVED_S3_OFFSET) "($29)" "\n"
631 "sw $18," STRINGIZE_VALUE_OF(PRESERVED_S2_OFFSET) "($29)" "\n"
632 "sw $17," STRINGIZE_VALUE_OF(PRESERVED_S1_OFFSET) "($29)" "\n"
633 "sw $16," STRINGIZE_VALUE_OF(PRESERVED_S0_OFFSET) "($29)" "\n"
634 #if WTF_MIPS_PIC
635 "sw $28," STRINGIZE_VALUE_OF(PRESERVED_GP_OFFSET) "($29)" "\n"
636 #endif
637 "move $16,$6 # set callFrameRegister" "\n"
638 "move $25,$4 # move executableAddress to t9" "\n"
639 "sw $5," STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "($29) # store JSStack to current stack" "\n"
640 "lw $9," STRINGIZE_VALUE_OF(STACK_LENGTH + 20) "($29) # load vm from previous stack" "\n"
641 "jalr $25" "\n"
642 "sw $9," STRINGIZE_VALUE_OF(VM_OFFSET) "($29) # store vm to current stack" "\n"
643 "lw $16," STRINGIZE_VALUE_OF(PRESERVED_S0_OFFSET) "($29)" "\n"
644 "lw $17," STRINGIZE_VALUE_OF(PRESERVED_S1_OFFSET) "($29)" "\n"
645 "lw $18," STRINGIZE_VALUE_OF(PRESERVED_S2_OFFSET) "($29)" "\n"
646 "lw $19," STRINGIZE_VALUE_OF(PRESERVED_S3_OFFSET) "($29)" "\n"
647 "lw $20," STRINGIZE_VALUE_OF(PRESERVED_S4_OFFSET) "($29)" "\n"
648 "lw $31," STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "($29)" "\n"
649 "jr $31" "\n"
650 "addiu $29,$29," STRINGIZE_VALUE_OF(STACK_LENGTH) "\n"
651 ".set reorder" "\n"
652 ".set macro" "\n"
653 ".end " SYMBOL_STRING(ctiTrampoline) "\n"
654 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
655 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
656 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
657 );
658
659 asm (
660 ".text" "\n"
661 ".align 2" "\n"
662 ".set noreorder" "\n"
663 ".set nomacro" "\n"
664 ".set nomips16" "\n"
665 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
666 ".ent " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
667 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
668 #if WTF_MIPS_PIC
669 ".set macro" "\n"
670 ".cpload $31" "\n"
671 "la $25," SYMBOL_STRING(cti_vm_throw) "\n"
672 ".set nomacro" "\n"
673 "bal " SYMBOL_STRING(cti_vm_throw) "\n"
674 "move $4,$29" "\n"
675 #else
676 "jal " SYMBOL_STRING(cti_vm_throw) "\n"
677 "move $4,$29" "\n"
678 #endif
679 "lw $16," STRINGIZE_VALUE_OF(PRESERVED_S0_OFFSET) "($29)" "\n"
680 "lw $17," STRINGIZE_VALUE_OF(PRESERVED_S1_OFFSET) "($29)" "\n"
681 "lw $18," STRINGIZE_VALUE_OF(PRESERVED_S2_OFFSET) "($29)" "\n"
682 "lw $19," STRINGIZE_VALUE_OF(PRESERVED_S3_OFFSET) "($29)" "\n"
683 "lw $20," STRINGIZE_VALUE_OF(PRESERVED_S4_OFFSET) "($29)" "\n"
684 "lw $31," STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "($29)" "\n"
685 "jr $31" "\n"
686 "addiu $29,$29," STRINGIZE_VALUE_OF(STACK_LENGTH) "\n"
687 ".set reorder" "\n"
688 ".set macro" "\n"
689 ".end " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
690 );
691
692 asm (
693 ".text" "\n"
694 ".align 2" "\n"
695 ".set noreorder" "\n"
696 ".set nomacro" "\n"
697 ".set nomips16" "\n"
698 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
699 ".ent " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
700 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
701 "lw $16," STRINGIZE_VALUE_OF(PRESERVED_S0_OFFSET) "($29)" "\n"
702 "lw $17," STRINGIZE_VALUE_OF(PRESERVED_S1_OFFSET) "($29)" "\n"
703 "lw $18," STRINGIZE_VALUE_OF(PRESERVED_S2_OFFSET) "($29)" "\n"
704 "lw $19," STRINGIZE_VALUE_OF(PRESERVED_S3_OFFSET) "($29)" "\n"
705 "lw $20," STRINGIZE_VALUE_OF(PRESERVED_S4_OFFSET) "($29)" "\n"
706 "lw $31," STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "($29)" "\n"
707 "jr $31" "\n"
708 "addiu $29,$29," STRINGIZE_VALUE_OF(STACK_LENGTH) "\n"
709 ".set reorder" "\n"
710 ".set macro" "\n"
711 ".end " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
712 );
713 #endif
714
715 #if COMPILER(GCC) && CPU(ARM_THUMB2)
716
717 asm (
718 ".text" "\n"
719 ".align 2" "\n"
720 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
721 HIDE_SYMBOL(ctiTrampoline) "\n"
722 ".thumb" "\n"
723 ".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
724 SYMBOL_STRING(ctiTrampoline) ":" "\n"
725 "sub sp, sp, #" STRINGIZE_VALUE_OF(FIRST_STACK_ARGUMENT) "\n"
726 "str lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
727 "str r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
728 "str r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
729 "str r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
730 "str r7, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R7_OFFSET) "]" "\n"
731 "str r8, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R8_OFFSET) "]" "\n"
732 "str r9, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R9_OFFSET) "]" "\n"
733 "str r10, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R10_OFFSET) "]" "\n"
734 "str r11, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R11_OFFSET) "]" "\n"
735 "str r1, [sp, #" STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "]" "\n"
736 "mov r5, r2" "\n"
737 "mov r6, #512" "\n"
738 "blx r0" "\n"
739 "ldr r11, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R11_OFFSET) "]" "\n"
740 "ldr r10, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R10_OFFSET) "]" "\n"
741 "ldr r9, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R9_OFFSET) "]" "\n"
742 "ldr r8, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R8_OFFSET) "]" "\n"
743 "ldr r7, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R7_OFFSET) "]" "\n"
744 "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
745 "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
746 "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
747 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
748 "add sp, sp, #" STRINGIZE_VALUE_OF(FIRST_STACK_ARGUMENT) "\n"
749 "bx lr" "\n"
750 ".align 2" "\n"
751 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
752 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
753 ".thumb" "\n"
754 ".thumb_func " THUMB_FUNC_PARAM(ctiTrampolineEnd) "\n"
755 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
756 );
757
758 asm (
759 ".text" "\n"
760 ".align 2" "\n"
761 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
762 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
763 ".thumb" "\n"
764 ".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
765 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
766 "mov r0, sp" "\n"
767 "bl " LOCAL_REFERENCE(cti_vm_throw) "\n"
768 "ldr r11, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R11_OFFSET) "]" "\n"
769 "ldr r10, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R10_OFFSET) "]" "\n"
770 "ldr r9, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R9_OFFSET) "]" "\n"
771 "ldr r8, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R8_OFFSET) "]" "\n"
772 "ldr r7, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R7_OFFSET) "]" "\n"
773 "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
774 "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
775 "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
776 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
777 "add sp, sp, #" STRINGIZE_VALUE_OF(FIRST_STACK_ARGUMENT) "\n"
778 "bx lr" "\n"
779 );
780
781 asm (
782 ".text" "\n"
783 ".align 2" "\n"
784 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
785 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
786 ".thumb" "\n"
787 ".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
788 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
789 "ldr r11, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R11_OFFSET) "]" "\n"
790 "ldr r10, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R10_OFFSET) "]" "\n"
791 "ldr r9, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R9_OFFSET) "]" "\n"
792 "ldr r8, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R8_OFFSET) "]" "\n"
793 "ldr r7, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R7_OFFSET) "]" "\n"
794 "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
795 "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
796 "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
797 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
798 "add sp, sp, #" STRINGIZE_VALUE_OF(FIRST_STACK_ARGUMENT) "\n"
799 "bx lr" "\n"
800 );
801
802 #elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
803
804 asm (
805 ".text" "\n"
806 ".globl " SYMBOL_STRING(ctiTrampoline) "\n"
807 HIDE_SYMBOL(ctiTrampoline) "\n"
808 INLINE_ARM_FUNCTION(ctiTrampoline)
809 SYMBOL_STRING(ctiTrampoline) ":" "\n"
810 "stmdb sp!, {r1-r3}" "\n"
811 "stmdb sp!, {r4-r6, r8-r11, lr}" "\n"
812 "sub sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
813 "mov r5, r2" "\n"
814 "mov r6, #512" "\n"
815 // r0 contains the code
816 "blx r0" "\n"
817 "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
818 "ldmia sp!, {r4-r6, r8-r11, lr}" "\n"
819 "add sp, sp, #12" "\n"
820 "bx lr" "\n"
821 ".globl " SYMBOL_STRING(ctiTrampolineEnd) "\n"
822 HIDE_SYMBOL(ctiTrampolineEnd) "\n"
823 SYMBOL_STRING(ctiTrampolineEnd) ":" "\n"
824 );
825
826 asm (
827 ".text" "\n"
828 ".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
829 HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
830 INLINE_ARM_FUNCTION(ctiVMThrowTrampoline)
831 SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
832 "mov r0, sp" "\n"
833 "bl " SYMBOL_STRING(cti_vm_throw) "\n"
834
835 // Both share the same return sequence
836 ".text" "\n"
837 ".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
838 HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
839 INLINE_ARM_FUNCTION(ctiOpThrowNotCaught)
840 SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
841 "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
842 "ldmia sp!, {r4-r6, r8-r11, lr}" "\n"
843 "add sp, sp, #12" "\n"
844 "bx lr" "\n"
845 );
846
847 #elif COMPILER(RVCT) && CPU(ARM_THUMB2)
848
849 __asm EncodedJSValue ctiTrampoline(void*, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*)
850 {
851 PRESERVE8
852 sub sp, sp, # FIRST_STACK_ARGUMENT
853 str lr, [sp, # PRESERVED_RETURN_ADDRESS_OFFSET ]
854 str r4, [sp, # PRESERVED_R4_OFFSET ]
855 str r5, [sp, # PRESERVED_R5_OFFSET ]
856 str r6, [sp, # PRESERVED_R6_OFFSET ]
857 str r7, [sp, # PRESERVED_R7_OFFSET ]
858 str r8, [sp, # PRESERVED_R8_OFFSET ]
859 str r9, [sp, # PRESERVED_R9_OFFSET ]
860 str r10, [sp, # PRESERVED_R10_OFFSET ]
861 str r11, [sp, # PRESERVED_R11_OFFSET ]
862 str r1, [sp, # REGISTER_FILE_OFFSET ]
863 mov r5, r2
864 mov r6, #512
865 blx r0
866 ldr r11, [sp, # PRESERVED_R11_OFFSET ]
867 ldr r10, [sp, # PRESERVED_R10_OFFSET ]
868 ldr r9, [sp, # PRESERVED_R9_OFFSET ]
869 ldr r8, [sp, # PRESERVED_R8_OFFSET ]
870 ldr r7, [sp, # PRESERVED_R7_OFFSET ]
871 ldr r6, [sp, # PRESERVED_R6_OFFSET ]
872 ldr r5, [sp, # PRESERVED_R5_OFFSET ]
873 ldr r4, [sp, # PRESERVED_R4_OFFSET ]
874 ldr lr, [sp, # PRESERVED_RETURN_ADDRESS_OFFSET ]
875 add sp, sp, # FIRST_STACK_ARGUMENT
876 bx lr
877 }
878
879 __asm void ctiVMThrowTrampoline()
880 {
881 PRESERVE8
882 mov r0, sp
883 bl cti_vm_throw
884 ldr r11, [sp, # PRESERVED_R11_OFFSET ]
885 ldr r10, [sp, # PRESERVED_R10_OFFSET ]
886 ldr r9, [sp, # PRESERVED_R9_OFFSET ]
887 ldr r8, [sp, # PRESERVED_R8_OFFSET ]
888 ldr r7, [sp, # PRESERVED_R7_OFFSET ]
889 ldr r6, [sp, # PRESERVED_R6_OFFSET ]
891 ldr r5, [sp, # PRESERVED_R5_OFFSET ]
892 ldr r4, [sp, # PRESERVED_R4_OFFSET ]
893 ldr lr, [sp, # PRESERVED_RETURN_ADDRESS_OFFSET ]
894 add sp, sp, # FIRST_STACK_ARGUMENT
895 bx lr
896 }
897
898 __asm void ctiOpThrowNotCaught()
899 {
900 PRESERVE8
901 ldr r11, [sp, # PRESERVED_R11_OFFSET ]
902 ldr r10, [sp, # PRESERVED_R10_OFFSET ]
903 ldr r9, [sp, # PRESERVED_R9_OFFSET ]
904 ldr r8, [sp, # PRESERVED_R8_OFFSET ]
905 ldr r7, [sp, # PRESERVED_R7_OFFSET ]
906 ldr r6, [sp, # PRESERVED_R6_OFFSET ]
908 ldr r5, [sp, # PRESERVED_R5_OFFSET ]
909 ldr r4, [sp, # PRESERVED_R4_OFFSET ]
910 ldr lr, [sp, # PRESERVED_RETURN_ADDRESS_OFFSET ]
911 add sp, sp, # FIRST_STACK_ARGUMENT
912 bx lr
913 }
914
915 #elif COMPILER(RVCT) && CPU(ARM_TRADITIONAL)
916
917 __asm EncodedJSValue ctiTrampoline(void*, JSStack*, CallFrame*, void* /*unused1*/, void* /*unused2*/, VM*)
918 {
919 ARM
920 stmdb sp!, {r1-r3}
921 stmdb sp!, {r4-r6, r8-r11, lr}
922 sub sp, sp, # PRESERVEDR4_OFFSET
923 mov r5, r2
924 mov r6, #512
925 mov lr, pc
926 bx r0
927 add sp, sp, # PRESERVEDR4_OFFSET
928 ldmia sp!, {r4-r6, r8-r11, lr}
929 add sp, sp, #12
930 bx lr
931 }
932 __asm void ctiTrampolineEnd()
933 {
934 }
935
936 __asm void ctiVMThrowTrampoline()
937 {
938 ARM
939 PRESERVE8
940 mov r0, sp
941 bl cti_vm_throw
942 add sp, sp, # PRESERVEDR4_OFFSET
943 ldmia sp!, {r4-r6, r8-r11, lr}
944 add sp, sp, #12
945 bx lr
946 }
947
948 __asm void ctiOpThrowNotCaught()
949 {
950 ARM
951 add sp, sp, # PRESERVEDR4_OFFSET
952 ldmia sp!, {r4-r6, r8-r11, lr}
953 add sp, sp, #12
954 bx lr
955 }
956 #endif
957
958 #if ENABLE(OPCODE_SAMPLING)
959 #define CTI_SAMPLER stackFrame.vm->interpreter->sampler()
960 #else
961 #define CTI_SAMPLER 0
962 #endif
963
964 void performPlatformSpecificJITAssertions(VM* vm)
965 {
966 if (!vm->canUseJIT())
967 return;
968
969 #if CPU(ARM_THUMB2)
970 // Unfortunately, the ARM compiler does not like the use of offsetof on JITStackFrame (since it contains non-POD types),
971 // and the OBJECT_OFFSETOF macro does not appear constantish enough for it to be happy with its use in COMPILE_ASSERT
972 // macros.
973 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == PRESERVED_RETURN_ADDRESS_OFFSET);
974 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVED_R4_OFFSET);
975 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == PRESERVED_R5_OFFSET);
976 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == PRESERVED_R6_OFFSET);
977 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR7) == PRESERVED_R7_OFFSET);
978 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR8) == PRESERVED_R8_OFFSET);
979 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR9) == PRESERVED_R9_OFFSET);
980 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR10) == PRESERVED_R10_OFFSET);
981 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR11) == PRESERVED_R11_OFFSET);
982
983 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, stack) == REGISTER_FILE_OFFSET);
984 // The fifth argument is the first item already on the stack.
985 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, unused1) == FIRST_STACK_ARGUMENT);
986
987 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
988
989 #elif CPU(ARM64)
990
991 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
992 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == PRESERVED_RETURN_ADDRESS_OFFSET);
993 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX19) == PRESERVED_X19_OFFSET);
994 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX20) == PRESERVED_X20_OFFSET);
995 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX21) == PRESERVED_X21_OFFSET);
996 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX22) == PRESERVED_X22_OFFSET);
997 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX23) == PRESERVED_X23_OFFSET);
998 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX24) == PRESERVED_X24_OFFSET);
999 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX25) == PRESERVED_X25_OFFSET);
1000 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX26) == PRESERVED_X26_OFFSET);
1001 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX27) == PRESERVED_X27_OFFSET);
1002 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedX28) == PRESERVED_X28_OFFSET);
1003 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, stack) == REGISTER_FILE_OFFSET);
1004 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == CALLFRAME_OFFSET);
1005 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == PROFILER_REFERENCE_OFFSET);
1006 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, vm) == VM_OFFSET);
1007 ASSERT(sizeof(struct JITStackFrame) == SIZEOF_JITSTACKFRAME);
1008
1009 #elif CPU(ARM_TRADITIONAL)
1010
1011 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
1012 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVEDR4_OFFSET);
1013
1014
1015 #elif CPU(MIPS)
1016 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedGP) == PRESERVED_GP_OFFSET);
1017 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS0) == PRESERVED_S0_OFFSET);
1018 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS1) == PRESERVED_S1_OFFSET);
1019 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS2) == PRESERVED_S2_OFFSET);
1020 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == PRESERVED_RETURN_ADDRESS_OFFSET);
1021 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
1022 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, stack) == REGISTER_FILE_OFFSET);
1023 ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, vm) == VM_OFFSET);
1024
1025 #endif
1026 }
1027
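// Attempts to cache a put_by_id: if the put is cacheable, the calling JIT code is
// repatched with a specialized stub (a transition stub for new properties, a replace
// stub otherwise); uncacheable cases repatch the call to the generic C stub instead.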
1028 NEVER_INLINE static void tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
1029 {
1030 // The interpreter checks for recursion here; I do not believe this can occur in CTI.
1031
1032 if (!baseValue.isCell())
1033 return;
1034
1035 // Uncacheable: give up.
1036 if (!slot.isCacheable()) {
1037 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
1038 return;
1039 }
1040
1041 JSCell* baseCell = baseValue.asCell();
1042 Structure* structure = baseCell->structure();
1043
1044 if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
1045 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
1046 return;
1047 }
1048
1049 // If baseCell != base, then baseCell must be a proxy for another object.
1050 if (baseCell != slot.base()) {
1051 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
1052 return;
1053 }
1054
1055 // If the offset isn't something we can patch, then bail out.
1056 if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset()))) {
1057 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
1058 return;
1059 }
1060
1061 // Cache hit: Specialize instruction and ref Structures.
1062
1063 // Structure transition, cache transition info
1064 if (slot.type() == PutPropertySlot::NewProperty) {
1065 if (structure->isDictionary()) {
1066 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
1067 return;
1068 }
1069
1070 // put_by_id_transition checks the prototype chain for setters.
1071 if (normalizePrototypeChain(callFrame, baseCell) == InvalidPrototypeChain) {
1072 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
1073 return;
1074 }
1075
1076 StructureChain* prototypeChain = structure->prototypeChain(callFrame);
1077 ASSERT(structure->previousID()->transitionWatchpointSetHasBeenInvalidated());
1078 stubInfo->initPutByIdTransition(callFrame->vm(), codeBlock->ownerExecutable(), structure->previousID(), structure, prototypeChain, direct);
1079 JIT::compilePutByIdTransition(callFrame->scope()->vm(), codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
1080 return;
1081 }
1082
1083 stubInfo->initPutByIdReplace(callFrame->vm(), codeBlock->ownerExecutable(), structure);
1084
1085 JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
1086 }
1087
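// Attempts to cache a get_by_id: depending on where the property lives (the base
// object itself, its prototype, or further down the prototype chain), the calling JIT
// code is repatched with a specialized access stub; uncacheable cases fall back to the
// generic C stub.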
1088 NEVER_INLINE static void tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
1089 {
1090 // FIXME: Write a test that proves we need to check for recursion here just
1091 // like the interpreter does, then add a check for recursion.
1092
1093 // FIXME: Cache property access for immediates.
1094 if (!baseValue.isCell()) {
1095 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
1096 return;
1097 }
1098
1099 VM* vm = &callFrame->vm();
1100
1101 if (isJSArray(baseValue) && propertyName == callFrame->propertyNames().length) {
1102 JIT::compilePatchGetArrayLength(callFrame->scope()->vm(), codeBlock, returnAddress);
1103 return;
1104 }
1105
1106 if (isJSString(baseValue) && propertyName == callFrame->propertyNames().length) {
1107 // The tradeoff of compiling a patched inline string length access routine does not seem
1108 // to pay off, so we currently only do this for arrays.
1109 ctiPatchCallByReturnAddress(codeBlock, returnAddress, vm->getCTIStub(stringLengthTrampolineGenerator).code());
1110 return;
1111 }
1112
1113 // Uncacheable: give up.
1114 if (!slot.isCacheable()) {
1115 stubInfo->accessType = access_get_by_id_generic;
1116 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
1117 return;
1118 }
1119
1120 JSCell* baseCell = baseValue.asCell();
1121 Structure* structure = baseCell->structure();
1122
1123 if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching()) {
1124 stubInfo->accessType = access_get_by_id_generic;
1125 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
1126 return;
1127 }
1128
1129 // Cache hit: Specialize instruction and ref Structures.
1130
1131 if (slot.slotBase() == baseValue) {
1132 RELEASE_ASSERT(stubInfo->accessType == access_unset);
1133 if ((slot.cachedPropertyType() != PropertySlot::Value)
1134 || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset())))
1135 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
1136 else {
1137 JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
1138 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), structure);
1139 }
1140 return;
1141 }
1142
1143 if (structure->isDictionary()) {
1144 stubInfo->accessType = access_get_by_id_generic;
1145 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
1146 return;
1147 }
1148
1149 if (slot.slotBase() == structure->prototypeForLookup(callFrame)) {
1150 ASSERT(slot.slotBase().isObject());
1151
1152 JSObject* slotBaseObject = asObject(slot.slotBase());
1153 size_t offset = slot.cachedOffset();
1154
1155 if (structure->typeInfo().hasImpureGetOwnPropertySlot()) {
1156 stubInfo->accessType = access_get_by_id_generic;
1157 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
1158 return;
1159 }
1160
1161 // Since we're accessing a prototype in a loop, it's a good bet that it
1162 // should not be treated as a dictionary.
1163 if (slotBaseObject->structure()->isDictionary()) {
1164 slotBaseObject->flattenDictionaryObject(callFrame->vm());
1165 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
1166 }
1167
1168 stubInfo->initGetByIdProto(callFrame->vm(), codeBlock->ownerExecutable(), structure, slotBaseObject->structure(), slot.cachedPropertyType() == PropertySlot::Value);
1169
1170 ASSERT(!structure->isDictionary());
1171 ASSERT(!slotBaseObject->structure()->isDictionary());
1172 JIT::compileGetByIdProto(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
1173 return;
1174 }
1175
1176 PropertyOffset offset = slot.cachedOffset();
1177 size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
1178 if (count == InvalidPrototypeChain) {
1179 stubInfo->accessType = access_get_by_id_generic;
1180 ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_generic));
1181 return;
1182 }
1183
1184 StructureChain* prototypeChain = structure->prototypeChain(callFrame);
1185 stubInfo->initGetByIdChain(callFrame->vm(), codeBlock->ownerExecutable(), structure, prototypeChain, count, slot.cachedPropertyType() == PropertySlot::Value);
1186 JIT::compileGetByIdChain(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
1187 }
1188
1189 #if !defined(NDEBUG)
1190
1191 extern "C" {
1192
1193 static void jscGeneratedNativeCode()
1194 {
1195 // When executing a JIT stub function (which might do an allocation), we hack the return address
1196 // to pretend to be executing this function, to keep stack logging tools from blowing out
1197 // memory.
1198 }
1199
1200 }
1201
1202 struct StackHack {
1203 ALWAYS_INLINE StackHack(JITStackFrame& stackFrame)
1204 : stackFrame(stackFrame)
1205 , savedReturnAddress(*stackFrame.returnAddressSlot())
1206 {
1207 if (!CodeProfiling::enabled())
1208 *stackFrame.returnAddressSlot() = ReturnAddressPtr(FunctionPtr(jscGeneratedNativeCode));
1209 }
1210
1211 ALWAYS_INLINE ~StackHack()
1212 {
1213 *stackFrame.returnAddressSlot() = savedReturnAddress;
1214 }
1215
1216 JITStackFrame& stackFrame;
1217 ReturnAddressPtr savedReturnAddress;
1218 };
1219
1220 #define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS); StackHack stackHack(stackFrame)
1221 #define STUB_SET_RETURN_ADDRESS(returnAddress) stackHack.savedReturnAddress = ReturnAddressPtr(returnAddress)
1222 #define STUB_RETURN_ADDRESS stackHack.savedReturnAddress
1223
1224 #else
1225
1226 #define STUB_INIT_STACK_FRAME(stackFrame) JITStackFrame& stackFrame = *reinterpret_cast_ptr<JITStackFrame*>(STUB_ARGS)
1227 #define STUB_SET_RETURN_ADDRESS(returnAddress) *stackFrame.returnAddressSlot() = ReturnAddressPtr(returnAddress)
1228 #define STUB_RETURN_ADDRESS *stackFrame.returnAddressSlot()
1229
1230 #endif
1231
1232 // The reason this is not inlined is to avoid having to do a PIC branch
1233 // to get the address of the ctiVMThrowTrampoline function. It's also
1234 // good to keep the code size down by leaving as much of the exception
1235 // handling code out of line as possible.
1236 static NEVER_INLINE void returnToThrowTrampoline(VM* vm, ReturnAddressPtr exceptionLocation, ReturnAddressPtr& returnAddressSlot)
1237 {
1238 RELEASE_ASSERT(vm->exception);
1239 vm->exceptionLocation = exceptionLocation;
1240 returnAddressSlot = ReturnAddressPtr(FunctionPtr(ctiVMThrowTrampoline));
1241 }
1242
1243 #define VM_THROW_EXCEPTION() \
1244 do { \
1245 VM_THROW_EXCEPTION_AT_END(); \
1246 return 0; \
1247 } while (0)
1248 #define VM_THROW_EXCEPTION_AT_END() \
1249 do {\
1250 returnToThrowTrampoline(stackFrame.vm, STUB_RETURN_ADDRESS, STUB_RETURN_ADDRESS);\
1251 } while (0)
1252
1253 #define CHECK_FOR_EXCEPTION() \
1254 do { \
1255 if (UNLIKELY(stackFrame.vm->exception)) \
1256 VM_THROW_EXCEPTION(); \
1257 } while (0)
1258 #define CHECK_FOR_EXCEPTION_AT_END() \
1259 do { \
1260 if (UNLIKELY(stackFrame.vm->exception)) \
1261 VM_THROW_EXCEPTION_AT_END(); \
1262 } while (0)
1263 #define CHECK_FOR_EXCEPTION_VOID() \
1264 do { \
1265 if (UNLIKELY(stackFrame.vm->exception)) { \
1266 VM_THROW_EXCEPTION_AT_END(); \
1267 return; \
1268 } \
1269 } while (0)
1270
1271 // Helper function for JIT stubs that may throw an exception in the middle of
1272 // processing a function call. This function rolls back the stack to
1273 // our caller, so exception processing can proceed from a valid state.
1274 template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot)
1275 {
1276 CallFrame* callFrame = newCallFrame->callerFrame();
1277 ASSERT(callFrame->vm().exception);
1278 jitStackFrame.callFrame = callFrame;
1279 callFrame->vm().topCallFrame = callFrame;
1280 returnToThrowTrampoline(&callFrame->vm(), ReturnAddressPtr(newCallFrame->returnPC()), returnAddressSlot);
1281 return T();
1282 }
1283
1284 template<typename T> static T throwExceptionFromOpCall(JITStackFrame& jitStackFrame, CallFrame* newCallFrame, ReturnAddressPtr& returnAddressSlot, JSValue exception)
1285 {
1286 newCallFrame->callerFrame()->vm().exception = exception;
1287 return throwExceptionFromOpCall<T>(jitStackFrame, newCallFrame, returnAddressSlot);
1288 }
1289
1290 #if CPU(ARM_THUMB2) && COMPILER(GCC)
1291
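// On ARM the return address lives in lr rather than on the stack, so each stub is
// wrapped in a thunk that spills lr into the JITStackFrame at
// THUNK_RETURN_ADDRESS_OFFSET. The C++ stub body (JITStubThunked_##op) can then read
// and repatch that slot through returnAddressSlot().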
1292 #define DEFINE_STUB_FUNCTION(rtype, op) \
1293 extern "C" { \
1294 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
1295 }; \
1296 asm ( \
1297 ".text" "\n" \
1298 ".align 2" "\n" \
1299 ".globl " SYMBOL_STRING(cti_##op) "\n" \
1300 HIDE_SYMBOL(cti_##op) "\n" \
1301 ".thumb" "\n" \
1302 ".thumb_func " THUMB_FUNC_PARAM(cti_##op) "\n" \
1303 SYMBOL_STRING(cti_##op) ":" "\n" \
1304 "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
1305 "bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
1306 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
1307 "bx lr" "\n" \
1308 ); \
1309 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION) \
1310
1311 #elif CPU(ARM64) && COMPILER(GCC)
1312
1313 #define DEFINE_STUB_FUNCTION(rtype, op) \
1314 extern "C" { \
1315 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
1316 }; \
1317 asm ( \
1318 ".section __TEXT,__text,regular,pure_instructions" "\n" \
1319 ".globl " SYMBOL_STRING(cti_##op) "\n" \
1320 ".align 2" "\n" \
1321 HIDE_SYMBOL(cti_##op) "\n" \
1322 SYMBOL_STRING(cti_##op) ":" "\n" \
1323 "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
1324 "bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
1325 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
1326 "ret" "\n" \
1327 ); \
1328 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION) \
1329
1330 #elif CPU(MIPS)
1331 #if WTF_MIPS_PIC
1332 #define DEFINE_STUB_FUNCTION(rtype, op) \
1333 extern "C" { \
1334 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
1335 }; \
1336 asm ( \
1337 ".text" "\n" \
1338 ".align 2" "\n" \
1339 ".set noreorder" "\n" \
1340 ".set nomacro" "\n" \
1341 ".set nomips16" "\n" \
1342 ".globl " SYMBOL_STRING(cti_##op) "\n" \
1343 ".ent " SYMBOL_STRING(cti_##op) "\n" \
1344 SYMBOL_STRING(cti_##op) ":" "\n" \
1345 ".set macro" "\n" \
1346 ".cpload $25" "\n" \
1347 "sw $31," STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "($29)" "\n" \
1348 "la $25," SYMBOL_STRING(JITStubThunked_##op) "\n" \
1349 ".set nomacro" "\n" \
1350 ".reloc 1f,R_MIPS_JALR," SYMBOL_STRING(JITStubThunked_##op) "\n" \
1351 "1: jalr $25" "\n" \
1352 "nop" "\n" \
1353 "lw $31," STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "($29)" "\n" \
1354 "jr $31" "\n" \
1355 "nop" "\n" \
1356 ".set reorder" "\n" \
1357 ".set macro" "\n" \
1358 ".end " SYMBOL_STRING(cti_##op) "\n" \
1359 ); \
1360 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
1361
1362 #else // WTF_MIPS_PIC
1363 #define DEFINE_STUB_FUNCTION(rtype, op) \
1364 extern "C" { \
1365 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
1366 }; \
1367 asm ( \
1368 ".text" "\n" \
1369 ".align 2" "\n" \
1370 ".set noreorder" "\n" \
1371 ".set nomacro" "\n" \
1372 ".set nomips16" "\n" \
1373 ".globl " SYMBOL_STRING(cti_##op) "\n" \
1374 ".ent " SYMBOL_STRING(cti_##op) "\n" \
1375 SYMBOL_STRING(cti_##op) ":" "\n" \
1376 "sw $31," STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "($29)" "\n" \
1377 "jal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
1378 "nop" "\n" \
1379 "lw $31," STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "($29)" "\n" \
1380 "jr $31" "\n" \
1381 "nop" "\n" \
1382 ".set reorder" "\n" \
1383 ".set macro" "\n" \
1384 ".end " SYMBOL_STRING(cti_##op) "\n" \
1385 ); \
1386 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
1387
1388 #endif
1389
1390 #elif CPU(ARM_TRADITIONAL) && COMPILER(GCC)
1391
1392 #define DEFINE_STUB_FUNCTION(rtype, op) \
1393 extern "C" { \
1394 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
1395 }; \
1396 asm ( \
1397 ".globl " SYMBOL_STRING(cti_##op) "\n" \
1398 INLINE_ARM_FUNCTION(cti_##op) \
1399 SYMBOL_STRING(cti_##op) ":" "\n" \
1400 "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
1401 "bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
1402 "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
1403 "bx lr" "\n" \
1404 ); \
1405 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
1406
1407 #elif (CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL)) && COMPILER(RVCT)
1408
1409 #define DEFINE_STUB_FUNCTION(rtype, op) rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
1410
1411 /* The following is a workaround for the RVCT toolchain; preprocessor macros are not expanded before the code is passed to the assembler */
1412
1413 /* The following section is a template to generate code for GeneratedJITStubs_RVCT.h */
1414 /* The pattern "#xxx#" will be replaced with "xxx" */
1415
1416 /*
1417 RVCT(extern "C" #rtype# JITStubThunked_#op#(STUB_ARGS_DECLARATION);)
1418 RVCT(__asm #rtype# cti_#op#(STUB_ARGS_DECLARATION))
1419 RVCT({)
1420 RVCT( PRESERVE8)
1421 RVCT( IMPORT JITStubThunked_#op#)
1422 RVCT( str lr, [sp, # THUNK_RETURN_ADDRESS_OFFSET])
1423 RVCT( bl JITStubThunked_#op#)
1424 RVCT( ldr lr, [sp, # THUNK_RETURN_ADDRESS_OFFSET])
1425 RVCT( bx lr)
1426 RVCT(})
1427 RVCT()
1428 */
1429
1430 /* Include the generated file */
1431 #include "GeneratedJITStubs_RVCT.h"
1432
1433 #elif CPU(ARM_TRADITIONAL) && COMPILER(MSVC)
1434
1435 #define DEFINE_STUB_FUNCTION(rtype, op) extern "C" rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
1436
1437 /* The following is a workaround for the MSVC toolchain; the inline assembler is not supported */
1438
1439 /* The following section is a template to generate code for GeneratedJITStubs_MSVC.asm */
1440 /* The pattern "#xxx#" will be replaced with "xxx" */
1441
1442 /*
1443 MSVC_BEGIN( AREA Trampoline, CODE)
1444 MSVC_BEGIN()
1445 MSVC_BEGIN( EXPORT ctiTrampoline)
1446 MSVC_BEGIN( EXPORT ctiTrampolineEnd)
1447 MSVC_BEGIN( EXPORT ctiVMThrowTrampoline)
1448 MSVC_BEGIN( EXPORT ctiOpThrowNotCaught)
1449 MSVC_BEGIN()
1450 MSVC_BEGIN(ctiTrampoline PROC)
1451 MSVC_BEGIN( stmdb sp!, {r1-r3})
1452 MSVC_BEGIN( stmdb sp!, {r4-r6, r8-r11, lr})
1453 MSVC_BEGIN( sub sp, sp, #68 ; sync with PRESERVEDR4_OFFSET)
1454 MSVC_BEGIN( mov r5, r2)
1455 MSVC_BEGIN( mov r6, #512)
1456 MSVC_BEGIN( ; r0 contains the code)
1457 MSVC_BEGIN( mov lr, pc)
1458 MSVC_BEGIN( bx r0)
1459 MSVC_BEGIN( add sp, sp, #68 ; sync with PRESERVEDR4_OFFSET)
1460 MSVC_BEGIN( ldmia sp!, {r4-r6, r8-r11, lr})
1461 MSVC_BEGIN( add sp, sp, #12)
1462 MSVC_BEGIN( bx lr)
1463 MSVC_BEGIN(ctiTrampolineEnd)
1464 MSVC_BEGIN(ctiTrampoline ENDP)
1465 MSVC_BEGIN()
1466 MSVC_BEGIN(ctiVMThrowTrampoline PROC)
1467 MSVC_BEGIN( mov r0, sp)
1468 MSVC_BEGIN( bl cti_vm_throw)
1469 MSVC_BEGIN(ctiOpThrowNotCaught)
1470 MSVC_BEGIN( add sp, sp, #68 ; sync with PRESERVEDR4_OFFSET)
1471 MSVC_BEGIN( ldmia sp!, {r4-r6, r8-r11, lr})
1472 MSVC_BEGIN( add sp, sp, #12)
1473 MSVC_BEGIN( bx lr)
1474 MSVC_BEGIN(ctiVMThrowTrampoline ENDP)
1475 MSVC_BEGIN()
1476
1477 MSVC( EXPORT cti_#op#)
1478 MSVC( IMPORT JITStubThunked_#op#)
1479 MSVC(cti_#op# PROC)
1480 MSVC( str lr, [sp, #64] ; sync with THUNK_RETURN_ADDRESS_OFFSET)
1481 MSVC( bl JITStubThunked_#op#)
1482 MSVC( ldr lr, [sp, #64] ; sync with THUNK_RETURN_ADDRESS_OFFSET)
1483 MSVC( bx lr)
1484 MSVC(cti_#op# ENDP)
1485 MSVC()
1486
1487 MSVC_END( END)
1488 */
1489
1490 #elif CPU(SH4)
1491 #define DEFINE_STUB_FUNCTION(rtype, op) \
1492 extern "C" { \
1493 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
1494 }; \
1495 asm volatile( \
1496 ".align 2" "\n" \
1497 ".globl " SYMBOL_STRING(cti_##op) "\n" \
1498 SYMBOL_STRING(cti_##op) ":" "\n" \
1499 "sts pr, r11" "\n" \
1500 "mov.l r11, @(0x38, r15)" "\n" \
1501 "mov.l .L2" SYMBOL_STRING(JITStubThunked_##op) ",r0" "\n" \
1502 "mov.l @(r0,r12),r11" "\n" \
1503 "jsr @r11" "\n" \
1504 "nop" "\n" \
1505 "mov.l @(0x38, r15), r11 " "\n" \
1506 "lds r11, pr " "\n" \
1507 "rts" "\n" \
1508 "nop" "\n" \
1509 ".align 2" "\n" \
1510 ".L2" SYMBOL_STRING(JITStubThunked_##op) ":.long " SYMBOL_STRING(JITStubThunked_##op) "@GOT \n" \
1511 ); \
1512 rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
1513 #else
1514 #define DEFINE_STUB_FUNCTION(rtype, op) rtype JIT_STUB cti_##op(STUB_ARGS_DECLARATION)
1515 #endif
1516
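/*
 * All DEFINE_STUB_FUNCTION variants above share one shape: cti_##op is a small assembly shim
 * that spills the return address into the JITStackFrame slot at THUNK_RETURN_ADDRESS_OFFSET,
 * calls the C++ body JITStubThunked_##op, restores the return address, and returns to JIT code.
 * An illustrative sketch of a single expansion (the exact shim is per-port, as spelled out above):
 *
 *     extern "C" EncodedJSValue JITStubThunked_op_add(STUB_ARGS_DECLARATION);
 *     // asm shim: cti_op_add -> save return address -> call JITStubThunked_op_add -> restore -> return
 *     EncodedJSValue JITStubThunked_op_add(STUB_ARGS_DECLARATION) { ... definition below ... }
 *
 * On the fallback #else path the macro simply declares cti_##op directly, with no shim involved.
 */
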
1517 DEFINE_STUB_FUNCTION(EncodedJSValue, op_create_this)
1518 {
1519 STUB_INIT_STACK_FRAME(stackFrame);
1520 CallFrame* callFrame = stackFrame.callFrame;
1521 size_t inlineCapacity = stackFrame.args[0].int32();
1522
1523 JSFunction* constructor = jsCast<JSFunction*>(callFrame->callee());
1524 #if !ASSERT_DISABLED
1525 ConstructData constructData;
1526 ASSERT(constructor->methodTable()->getConstructData(constructor, constructData) == ConstructTypeJS);
1527 #endif
1528
1529 Structure* structure = constructor->allocationProfile(callFrame, inlineCapacity)->structure();
1530 JSValue result = constructEmptyObject(callFrame, structure);
1531
1532 return JSValue::encode(result);
1533 }
1534
1535 DEFINE_STUB_FUNCTION(EncodedJSValue, op_convert_this)
1536 {
1537 STUB_INIT_STACK_FRAME(stackFrame);
1538
1539 JSValue v1 = stackFrame.args[0].jsValue();
1540 CallFrame* callFrame = stackFrame.callFrame;
1541
1542 ASSERT(v1.isPrimitive());
1543
1544 JSObject* result = v1.toThisObject(callFrame);
1545 CHECK_FOR_EXCEPTION_AT_END();
1546 return JSValue::encode(result);
1547 }
1548
1549 DEFINE_STUB_FUNCTION(EncodedJSValue, op_add)
1550 {
1551 STUB_INIT_STACK_FRAME(stackFrame);
1552
1553 JSValue v1 = stackFrame.args[0].jsValue();
1554 JSValue v2 = stackFrame.args[1].jsValue();
1555 CallFrame* callFrame = stackFrame.callFrame;
1556
1557 if (v1.isString() && !v2.isObject()) {
1558 JSValue result = jsString(callFrame, asString(v1), v2.toString(callFrame));
1559 CHECK_FOR_EXCEPTION_AT_END();
1560 return JSValue::encode(result);
1561 }
1562
1563 if (v1.isNumber() && v2.isNumber())
1564 return JSValue::encode(jsNumber(v1.asNumber() + v2.asNumber()));
1565
1566 // All other cases are pretty uncommon
1567 JSValue result = jsAddSlowCase(callFrame, v1, v2);
1568 CHECK_FOR_EXCEPTION_AT_END();
1569 return JSValue::encode(result);
1570 }
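
// The fast paths above mirror the common cases of the JS '+' operator: a string left operand with
// a non-object right operand concatenates via jsString, and two numeric operands add without
// allocation. Everything else (operands needing ToPrimitive conversion, mixed primitives, etc.)
// falls through to jsAddSlowCase, which performs the full generic addition algorithm.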
1571
1572 DEFINE_STUB_FUNCTION(EncodedJSValue, op_inc)
1573 {
1574 STUB_INIT_STACK_FRAME(stackFrame);
1575
1576 JSValue v = stackFrame.args[0].jsValue();
1577
1578 CallFrame* callFrame = stackFrame.callFrame;
1579 JSValue result = jsNumber(v.toNumber(callFrame) + 1);
1580 CHECK_FOR_EXCEPTION_AT_END();
1581 return JSValue::encode(result);
1582 }
1583
1584 DEFINE_STUB_FUNCTION(void, handle_watchdog_timer)
1585 {
1586 STUB_INIT_STACK_FRAME(stackFrame);
1587 CallFrame* callFrame = stackFrame.callFrame;
1588 VM* vm = stackFrame.vm;
1589 if (UNLIKELY(vm->watchdog.didFire(callFrame))) {
1590 vm->exception = createTerminatedExecutionException(vm);
1591 VM_THROW_EXCEPTION_AT_END();
1592 return;
1593 }
1594 }
1595
1596 DEFINE_STUB_FUNCTION(void*, stack_check)
1597 {
1598 STUB_INIT_STACK_FRAME(stackFrame);
1599 CallFrame* callFrame = stackFrame.callFrame;
1600
1601 if (UNLIKELY(!stackFrame.stack->grow(&callFrame->registers()[callFrame->codeBlock()->m_numCalleeRegisters])))
1602 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createStackOverflowError(callFrame->callerFrame()));
1603
1604 return callFrame;
1605 }
1606
1607 DEFINE_STUB_FUNCTION(JSObject*, op_new_object)
1608 {
1609 STUB_INIT_STACK_FRAME(stackFrame);
1610
1611 return constructEmptyObject(stackFrame.callFrame, stackFrame.args[0].structure());
1612 }
1613
1614 DEFINE_STUB_FUNCTION(void, op_put_by_id_generic)
1615 {
1616 STUB_INIT_STACK_FRAME(stackFrame);
1617
1618 PutPropertySlot slot(stackFrame.callFrame->codeBlock()->isStrictMode());
1619 stackFrame.args[0].jsValue().put(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
1620 CHECK_FOR_EXCEPTION_AT_END();
1621 }
1622
1623 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
1624 {
1625 STUB_INIT_STACK_FRAME(stackFrame);
1626
1627 PutPropertySlot slot(stackFrame.callFrame->codeBlock()->isStrictMode());
1628 JSValue baseValue = stackFrame.args[0].jsValue();
1629 ASSERT(baseValue.isObject());
1630 asObject(baseValue)->putDirect(stackFrame.callFrame->vm(), stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
1631 CHECK_FOR_EXCEPTION_AT_END();
1632 }
1633
1634 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
1635 {
1636 STUB_INIT_STACK_FRAME(stackFrame);
1637
1638 CallFrame* callFrame = stackFrame.callFrame;
1639 Identifier& ident = stackFrame.args[1].identifier();
1640
1641 JSValue baseValue = stackFrame.args[0].jsValue();
1642 PropertySlot slot(baseValue);
1643 JSValue result = baseValue.get(callFrame, ident, slot);
1644
1645 CHECK_FOR_EXCEPTION_AT_END();
1646 return JSValue::encode(result);
1647 }
1648
1649 DEFINE_STUB_FUNCTION(void, op_put_by_id)
1650 {
1651 STUB_INIT_STACK_FRAME(stackFrame);
1652 CallFrame* callFrame = stackFrame.callFrame;
1653 Identifier& ident = stackFrame.args[1].identifier();
1654
1655 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
1656 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
1657 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
1658
1659 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1660 stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
1661
1662 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
1663 stubInfo->setSeen();
1664 tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
1665 }
1666
1667 CHECK_FOR_EXCEPTION_AT_END();
1668 }
1669
1670 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
1671 {
1672 STUB_INIT_STACK_FRAME(stackFrame);
1673 CallFrame* callFrame = stackFrame.callFrame;
1674 Identifier& ident = stackFrame.args[1].identifier();
1675
1676 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
1677 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
1678 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
1679
1680 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1681 JSValue baseValue = stackFrame.args[0].jsValue();
1682 ASSERT(baseValue.isObject());
1683
1684 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);
1685
1686 if (accessType == static_cast<AccessType>(stubInfo->accessType)) {
1687 stubInfo->setSeen();
1688 tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
1689 }
1690
1691 CHECK_FOR_EXCEPTION_AT_END();
1692 }
1693
1694 DEFINE_STUB_FUNCTION(void, op_put_by_id_fail)
1695 {
1696 STUB_INIT_STACK_FRAME(stackFrame);
1697
1698 CallFrame* callFrame = stackFrame.callFrame;
1699 Identifier& ident = stackFrame.args[1].identifier();
1700
1701 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1702 stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
1703
1704 CHECK_FOR_EXCEPTION_AT_END();
1705 }
1706
1707 DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
1708 {
1709 STUB_INIT_STACK_FRAME(stackFrame);
1710
1711 CallFrame* callFrame = stackFrame.callFrame;
1712 Identifier& ident = stackFrame.args[1].identifier();
1713
1714 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
1715 JSValue baseValue = stackFrame.args[0].jsValue();
1716 ASSERT(baseValue.isObject());
1717 asObject(baseValue)->putDirect(callFrame->vm(), ident, stackFrame.args[2].jsValue(), slot);
1718
1719 CHECK_FOR_EXCEPTION_AT_END();
1720 }
1721
1722 DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
1723 {
1724 STUB_INIT_STACK_FRAME(stackFrame);
1725
1726 JSValue baseValue = stackFrame.args[0].jsValue();
1727 int32_t oldSize = stackFrame.args[3].int32();
1728 Structure* newStructure = stackFrame.args[4].structure();
1729 int32_t newSize = newStructure->outOfLineCapacity();
1730
1731 ASSERT(oldSize >= 0);
1732 ASSERT(newSize > oldSize);
1733
1734 ASSERT(baseValue.isObject());
1735 JSObject* base = asObject(baseValue);
1736 VM& vm = *stackFrame.vm;
1737 Butterfly* butterfly = base->growOutOfLineStorage(vm, oldSize, newSize);
1738 base->setButterfly(vm, butterfly, newStructure);
1739
1740 return base;
1741 }
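
// op_put_by_id_transition_realloc is the out-of-line part of a cached transition put: when the
// destination Structure needs more out-of-line property storage than the object currently has,
// the butterfly is grown and the new Structure installed here; the JIT-generated stub that called
// us then stores the value at the new offset.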
1742
1743 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id)
1744 {
1745 STUB_INIT_STACK_FRAME(stackFrame);
1746 CallFrame* callFrame = stackFrame.callFrame;
1747 Identifier& ident = stackFrame.args[1].identifier();
1748
1749 CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
1750 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
1751 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
1752
1753 JSValue baseValue = stackFrame.args[0].jsValue();
1754 PropertySlot slot(baseValue);
1755 JSValue result = baseValue.get(callFrame, ident, slot);
1756
1757 if (accessType != static_cast<AccessType>(stubInfo->accessType))
1758 return JSValue::encode(result);
1759
1760 if (!stubInfo->seenOnce())
1761 stubInfo->setSeen();
1762 else
1763 tryCacheGetByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, baseValue, ident, slot, stubInfo);
1764
1765 CHECK_FOR_EXCEPTION_AT_END();
1766 return JSValue::encode(result);
1767 }
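
// Inline-cache protocol shared by the get_by_id/put_by_id stubs here: the access type is
// snapshotted before the (potentially reentrant) property access and re-checked afterwards, so a
// stub that was repatched underneath us is not patched again. For get_by_id, the first miss only
// marks the stub as seen; caching begins on the second miss, so one-shot accesses never pay the
// repatching cost.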
1768
1769 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_self_fail)
1770 {
1771 STUB_INIT_STACK_FRAME(stackFrame);
1772
1773 CallFrame* callFrame = stackFrame.callFrame;
1774 Identifier& ident = stackFrame.args[1].identifier();
1775
1776 CodeBlock* codeBlock = callFrame->codeBlock();
1777 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
1778 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
1779
1780 JSValue baseValue = stackFrame.args[0].jsValue();
1781 PropertySlot slot(baseValue);
1782 JSValue result = baseValue.get(callFrame, ident, slot);
1783
1784 if (accessType != static_cast<AccessType>(stubInfo->accessType))
1785 return JSValue::encode(result);
1786
1787 CHECK_FOR_EXCEPTION();
1788
1789 if (baseValue.isCell()
1790 && slot.isCacheable()
1791 && !baseValue.asCell()->structure()->isUncacheableDictionary()
1792 && slot.slotBase() == baseValue) {
1793
1794 ASSERT(slot.slotBase().isObject());
1795
1796 PolymorphicAccessStructureList* polymorphicStructureList;
1797 int listIndex = 1;
1798
1799 if (stubInfo->accessType == access_unset)
1800 stubInfo->initGetByIdSelf(callFrame->vm(), codeBlock->ownerExecutable(), baseValue.asCell()->structure());
1801
1802 if (stubInfo->accessType == access_get_by_id_self) {
1803 ASSERT(!stubInfo->stubRoutine);
1804 polymorphicStructureList = new PolymorphicAccessStructureList(callFrame->vm(), codeBlock->ownerExecutable(), 0, stubInfo->u.getByIdSelf.baseObjectStructure.get(), true);
1805 stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
1806 } else {
1807 polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
1808 listIndex = stubInfo->u.getByIdSelfList.listSize;
1809 }
1810 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
1811 stubInfo->u.getByIdSelfList.listSize++;
1812 JIT::compileGetByIdSelfList(callFrame->scope()->vm(), codeBlock, stubInfo, polymorphicStructureList, listIndex, baseValue.asCell()->structure(), ident, slot, slot.cachedOffset());
1813
1814 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
1815 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
1816 }
1817 } else
1818 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
1819 return JSValue::encode(result);
1820 }
1821
1822 static PolymorphicAccessStructureList* getPolymorphicAccessStructureListSlot(VM& vm, ScriptExecutable* owner, StructureStubInfo* stubInfo, int& listIndex)
1823 {
1824 PolymorphicAccessStructureList* prototypeStructureList = 0;
1825 listIndex = 1;
1826
1827 switch (stubInfo->accessType) {
1828 case access_get_by_id_proto:
1829 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdProto.baseObjectStructure.get(), stubInfo->u.getByIdProto.prototypeStructure.get(), true);
1830 stubInfo->stubRoutine.clear();
1831 stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
1832 break;
1833 case access_get_by_id_chain:
1834 prototypeStructureList = new PolymorphicAccessStructureList(vm, owner, stubInfo->stubRoutine, stubInfo->u.getByIdChain.baseObjectStructure.get(), stubInfo->u.getByIdChain.chain.get(), true);
1835 stubInfo->stubRoutine.clear();
1836 stubInfo->initGetByIdProtoList(prototypeStructureList, 2);
1837 break;
1838 case access_get_by_id_proto_list:
1839 prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
1840 listIndex = stubInfo->u.getByIdProtoList.listSize;
1841 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
1842 stubInfo->u.getByIdProtoList.listSize++;
1843 break;
1844 default:
1845 RELEASE_ASSERT_NOT_REACHED();
1846 }
1847
1848 ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
1849 return prototypeStructureList;
1850 }
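
// The helper above migrates a monomorphic proto/chain stub into a polymorphic list: the existing
// stub becomes the first entry, the caller compiles the new case at the returned listIndex, and
// once listIndex reaches POLYMORPHIC_LIST_CACHE_SIZE the call site is patched over to the fully
// generic (or "list full") variant instead of growing the list further.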
1851
1852 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
1853 {
1854 STUB_INIT_STACK_FRAME(stackFrame);
1855 CallFrame* callFrame = stackFrame.callFrame;
1856 GetterSetter* getterSetter = asGetterSetter(stackFrame.args[0].jsObject());
1857 if (!getterSetter->getter())
1858 return JSValue::encode(jsUndefined());
1859 JSObject* getter = asObject(getterSetter->getter());
1860 CallData callData;
1861 CallType callType = getter->methodTable()->getCallData(getter, callData);
1862 JSValue result = call(callFrame, getter, callType, callData, stackFrame.args[1].jsObject(), ArgList());
1863 if (callFrame->hadException())
1864 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);
1865
1866 return JSValue::encode(result);
1867 }
1868
1869 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
1870 {
1871 STUB_INIT_STACK_FRAME(stackFrame);
1872 CallFrame* callFrame = stackFrame.callFrame;
1873 JSObject* slotBase = stackFrame.args[0].jsObject();
1874 PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
1875 const Identifier& ident = stackFrame.args[2].identifier();
1876 JSValue result = getter(callFrame, slotBase, ident);
1877 if (callFrame->hadException())
1878 returnToThrowTrampoline(&callFrame->vm(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);
1879
1880 return JSValue::encode(result);
1881 }
1882
1883 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
1884 {
1885 STUB_INIT_STACK_FRAME(stackFrame);
1886
1887 CallFrame* callFrame = stackFrame.callFrame;
1888 const Identifier& propertyName = stackFrame.args[1].identifier();
1889
1890 CodeBlock* codeBlock = callFrame->codeBlock();
1891 StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
1892 AccessType accessType = static_cast<AccessType>(stubInfo->accessType);
1893
1894 JSValue baseValue = stackFrame.args[0].jsValue();
1895 PropertySlot slot(baseValue);
1896 JSValue result = baseValue.get(callFrame, propertyName, slot);
1897
1898 CHECK_FOR_EXCEPTION();
1899
1900 if (accessType != static_cast<AccessType>(stubInfo->accessType)
1901 || !baseValue.isCell()
1902 || !slot.isCacheable()
1903 || baseValue.asCell()->structure()->isDictionary()
1904 || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()) {
1905 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
1906 return JSValue::encode(result);
1907 }
1908
1909 Structure* structure = baseValue.asCell()->structure();
1910
1911 ASSERT(slot.slotBase().isObject());
1912 JSObject* slotBaseObject = asObject(slot.slotBase());
1913
1914 PropertyOffset offset = slot.cachedOffset();
1915
1916 if (slot.slotBase() == baseValue)
1917 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
1918 else if (slot.slotBase() == baseValue.asCell()->structure()->prototypeForLookup(callFrame)) {
1919 ASSERT(!baseValue.asCell()->structure()->isDictionary());
1920
1921 if (baseValue.asCell()->structure()->typeInfo().hasImpureGetOwnPropertySlot()) {
1922 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
1923 return JSValue::encode(result);
1924 }
1925
1926 // Since we're accessing a prototype in a loop, it's a good bet that it
1927 // should not be treated as a dictionary.
1928 if (slotBaseObject->structure()->isDictionary()) {
1929 slotBaseObject->flattenDictionaryObject(callFrame->vm());
1930 offset = slotBaseObject->structure()->get(callFrame->vm(), propertyName);
1931 }
1932
1933 int listIndex;
1934 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
1935 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
1936 JIT::compileGetByIdProtoList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
1937
1938 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
1939 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
1940 }
1941 } else {
1942 size_t count = normalizePrototypeChainForChainAccess(callFrame, baseValue, slot.slotBase(), propertyName, offset);
1943 if (count == InvalidPrototypeChain) {
1944 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
1945 return JSValue::encode(result);
1946 }
1947
1948 ASSERT(!baseValue.asCell()->structure()->isDictionary());
1949 int listIndex;
1950 PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(callFrame->vm(), codeBlock->ownerExecutable(), stubInfo, listIndex);
1951
1952 if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
1953 StructureChain* protoChain = structure->prototypeChain(callFrame);
1954 JIT::compileGetByIdChainList(callFrame->scope()->vm(), callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
1955
1956 if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
1957 ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
1958 }
1959 }
1960
1961 return JSValue::encode(result);
1962 }
1963
1964 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list_full)
1965 {
1966 STUB_INIT_STACK_FRAME(stackFrame);
1967
1968 JSValue baseValue = stackFrame.args[0].jsValue();
1969 PropertySlot slot(baseValue);
1970 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
1971
1972 CHECK_FOR_EXCEPTION_AT_END();
1973 return JSValue::encode(result);
1974 }
1975
1976 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_fail)
1977 {
1978 STUB_INIT_STACK_FRAME(stackFrame);
1979
1980 JSValue baseValue = stackFrame.args[0].jsValue();
1981 PropertySlot slot(baseValue);
1982 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
1983
1984 CHECK_FOR_EXCEPTION_AT_END();
1985 return JSValue::encode(result);
1986 }
1987
1988 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_array_fail)
1989 {
1990 STUB_INIT_STACK_FRAME(stackFrame);
1991
1992 JSValue baseValue = stackFrame.args[0].jsValue();
1993 PropertySlot slot(baseValue);
1994 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
1995
1996 CHECK_FOR_EXCEPTION_AT_END();
1997 return JSValue::encode(result);
1998 }
1999
2000 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_string_fail)
2001 {
2002 STUB_INIT_STACK_FRAME(stackFrame);
2003
2004 JSValue baseValue = stackFrame.args[0].jsValue();
2005 PropertySlot slot(baseValue);
2006 JSValue result = baseValue.get(stackFrame.callFrame, stackFrame.args[1].identifier(), slot);
2007
2008 CHECK_FOR_EXCEPTION_AT_END();
2009 return JSValue::encode(result);
2010 }
2011
2012 DEFINE_STUB_FUNCTION(EncodedJSValue, op_check_has_instance)
2013 {
2014 STUB_INIT_STACK_FRAME(stackFrame);
2015
2016 CallFrame* callFrame = stackFrame.callFrame;
2017 JSValue value = stackFrame.args[0].jsValue();
2018 JSValue baseVal = stackFrame.args[1].jsValue();
2019
2020 if (baseVal.isObject()) {
2021 JSObject* baseObject = asObject(baseVal);
2022 ASSERT(!baseObject->structure()->typeInfo().implementsDefaultHasInstance());
2023 if (baseObject->structure()->typeInfo().implementsHasInstance()) {
2024 bool result = baseObject->methodTable()->customHasInstance(baseObject, callFrame, value);
2025 CHECK_FOR_EXCEPTION_AT_END();
2026 return JSValue::encode(jsBoolean(result));
2027 }
2028 }
2029
2030 stackFrame.vm->exception = createInvalidParamError(callFrame, "instanceof", baseVal);
2031 VM_THROW_EXCEPTION_AT_END();
2032 return JSValue::encode(JSValue());
2033 }
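
// op_check_has_instance covers the non-default cases of instanceof: a right-hand side with a
// custom [[HasInstance]] is dispatched through customHasInstance, and a non-object right-hand
// side is rejected via createInvalidParamError. The default prototype-chain walk is handled by
// op_instanceof (JSObject::defaultHasInstance) further below.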
2034
2035 #if ENABLE(DFG_JIT)
2036 DEFINE_STUB_FUNCTION(void, optimize)
2037 {
2038 STUB_INIT_STACK_FRAME(stackFrame);
2039
2040 CallFrame* callFrame = stackFrame.callFrame;
2041 CodeBlock* codeBlock = callFrame->codeBlock();
2042 unsigned bytecodeIndex = stackFrame.args[0].int32();
2043
2044 #if ENABLE(JIT_VERBOSE_OSR)
2045 dataLog(
2046 *codeBlock, ": Entered optimize with bytecodeIndex = ", bytecodeIndex,
2047 ", executeCounter = ", codeBlock->jitExecuteCounter(),
2048 ", optimizationDelayCounter = ", codeBlock->reoptimizationRetryCounter(),
2049 ", exitCounter = ");
2050 if (codeBlock->hasOptimizedReplacement())
2051 dataLog(codeBlock->replacement()->osrExitCounter());
2052 else
2053 dataLog("N/A");
2054 dataLog("\n");
2055 #endif
2056
2057 if (!codeBlock->checkIfOptimizationThresholdReached()) {
2058 codeBlock->updateAllPredictions();
2059 #if ENABLE(JIT_VERBOSE_OSR)
2060 dataLog("Choosing not to optimize ", *codeBlock, " yet.\n");
2061 #endif
2062 return;
2063 }
2064
2065 if (codeBlock->hasOptimizedReplacement()) {
2066 #if ENABLE(JIT_VERBOSE_OSR)
2067 dataLog("Considering OSR ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
2068 #endif
2069 // If we have an optimized replacement, then it must be the case that we entered
2070 // cti_optimize from a loop. That's because if there's an optimized replacement,
2071 // then all calls to this function will be relinked to the replacement and so
2072 // the prologue OSR will never fire.
2073
2074 // This is an interesting threshold check. Consider that a function OSR exits
2075 // in the middle of a loop, while having a relatively low exit count. The exit
2076 // will reset the execution counter to some target threshold, meaning that this
2077 // code won't be reached until that loop heats up for >=1000 executions. But then
2078 // we do a second check here, to see if we should either reoptimize, or just
2079 // attempt OSR entry. Hence it might even be correct for
2080 // shouldReoptimizeFromLoopNow() to always return true. But we make it do some
2081 // additional checking anyway, to reduce the amount of recompilation thrashing.
2082 if (codeBlock->replacement()->shouldReoptimizeFromLoopNow()) {
2083 #if ENABLE(JIT_VERBOSE_OSR)
2084 dataLog("Triggering reoptimization of ", *codeBlock, "(", *codeBlock->replacement(), ") (in loop).\n");
2085 #endif
2086 codeBlock->reoptimize();
2087 return;
2088 }
2089 } else {
2090 if (!codeBlock->shouldOptimizeNow()) {
2091 #if ENABLE(JIT_VERBOSE_OSR)
2092 dataLog("Delaying optimization for ", *codeBlock, " (in loop) because of insufficient profiling.\n");
2093 #endif
2094 return;
2095 }
2096
2097 #if ENABLE(JIT_VERBOSE_OSR)
2098 dataLog("Triggering optimized compilation of ", *codeBlock, "\n");
2099 #endif
2100
2101 JSScope* scope = callFrame->scope();
2102 JSObject* error = codeBlock->compileOptimized(callFrame, scope, bytecodeIndex);
2103 #if ENABLE(JIT_VERBOSE_OSR)
2104 if (error)
2105 dataLog("WARNING: optimized compilation failed.\n");
2106 #else
2107 UNUSED_PARAM(error);
2108 #endif
2109
2110 if (codeBlock->replacement() == codeBlock) {
2111 #if ENABLE(JIT_VERBOSE_OSR)
2112 dataLog("Optimizing ", *codeBlock, " failed.\n");
2113 #endif
2114
2115 ASSERT(codeBlock->getJITType() == JITCode::BaselineJIT);
2116 codeBlock->dontOptimizeAnytimeSoon();
2117 return;
2118 }
2119 }
2120
2121 CodeBlock* optimizedCodeBlock = codeBlock->replacement();
2122 ASSERT(optimizedCodeBlock->getJITType() == JITCode::DFGJIT);
2123
2124 if (void* address = DFG::prepareOSREntry(callFrame, optimizedCodeBlock, bytecodeIndex)) {
2125 if (Options::showDFGDisassembly()) {
2126 dataLog(
2127 "Performing OSR ", *codeBlock, " -> ", *optimizedCodeBlock, ", address ",
2128 RawPointer((STUB_RETURN_ADDRESS).value()), " -> ", RawPointer(address), ".\n");
2129 }
2130 #if ENABLE(JIT_VERBOSE_OSR)
2131 dataLog("Optimizing ", *codeBlock, " succeeded, performing OSR after a delay of ", codeBlock->optimizationDelayCounter(), ".\n");
2132 #endif
2133
2134 codeBlock->optimizeSoon();
2135 STUB_SET_RETURN_ADDRESS(address);
2136 return;
2137 }
2138
2139 #if ENABLE(JIT_VERBOSE_OSR)
2140 dataLog("Optimizing ", *codeBlock, " succeeded, OSR failed, after a delay of ", codeBlock->optimizationDelayCounter(), ".\n");
2141 #endif
2142
2143 // Count the OSR failure as a speculation failure. If this happens a lot, then
2144 // reoptimize.
2145 optimizedCodeBlock->countOSRExit();
2146
2147 #if ENABLE(JIT_VERBOSE_OSR)
2148 dataLog("Encountered OSR failure ", *codeBlock, " -> ", *codeBlock->replacement(), ".\n");
2149 #endif
2150
2151 // We are a lot more conservative about triggering reoptimization after OSR failure than
2152 // before it. If we enter the optimize_from_loop trigger with a bucket full of fail
2153 // already, then we really would like to reoptimize immediately. But this case covers
2154 // something else: there weren't many (or any) speculation failures before, but we just
2155 // failed to enter the speculative code because some variable had the wrong value or
2156 // because the OSR code decided for some spurious reason that it did not want to OSR
2157 // right now. So, we trigger reoptimization only upon the more conservative (non-loop)
2158 // reoptimization trigger.
2159 if (optimizedCodeBlock->shouldReoptimizeNow()) {
2160 #if ENABLE(JIT_VERBOSE_OSR)
2161 dataLog("Triggering reoptimization of ", *codeBlock, " -> ", *codeBlock->replacement(), " (after OSR fail).\n");
2162 #endif
2163 codeBlock->reoptimize();
2164 return;
2165 }
2166
2167 // OSR failed this time, but it might succeed next time! Let the code run a bit
2168 // longer and then try again.
2169 codeBlock->optimizeAfterWarmUp();
2170 }
2171 #endif // ENABLE(DFG_JIT)
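
// In short, cti_optimize drives tiering for the baseline JIT: do nothing until the optimization
// threshold is reached; if an optimized replacement already exists, either reoptimize (when the
// loop has been exiting too often) or attempt OSR entry into it; otherwise compile the optimized
// replacement first. A failed OSR entry is counted like a speculation failure, and repeated
// failures eventually trigger reoptimization through the more conservative non-loop path.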
2172
2173 DEFINE_STUB_FUNCTION(EncodedJSValue, op_instanceof)
2174 {
2175 STUB_INIT_STACK_FRAME(stackFrame);
2176
2177 CallFrame* callFrame = stackFrame.callFrame;
2178 JSValue value = stackFrame.args[0].jsValue();
2179 JSValue proto = stackFrame.args[1].jsValue();
2180
2181 ASSERT(!value.isObject() || !proto.isObject());
2182
2183 bool result = JSObject::defaultHasInstance(callFrame, value, proto);
2184 CHECK_FOR_EXCEPTION_AT_END();
2185 return JSValue::encode(jsBoolean(result));
2186 }
2187
2188 DEFINE_STUB_FUNCTION(EncodedJSValue, op_del_by_id)
2189 {
2190 STUB_INIT_STACK_FRAME(stackFrame);
2191
2192 CallFrame* callFrame = stackFrame.callFrame;
2193
2194 JSObject* baseObj = stackFrame.args[0].jsValue().toObject(callFrame);
2195
2196 bool couldDelete = baseObj->methodTable()->deleteProperty(baseObj, callFrame, stackFrame.args[1].identifier());
2197 JSValue result = jsBoolean(couldDelete);
2198 if (!couldDelete && callFrame->codeBlock()->isStrictMode())
2199 stackFrame.vm->exception = createTypeError(stackFrame.callFrame, "Unable to delete property.");
2200
2201 CHECK_FOR_EXCEPTION_AT_END();
2202 return JSValue::encode(result);
2203 }
2204
2205 DEFINE_STUB_FUNCTION(EncodedJSValue, op_mul)
2206 {
2207 STUB_INIT_STACK_FRAME(stackFrame);
2208
2209 JSValue src1 = stackFrame.args[0].jsValue();
2210 JSValue src2 = stackFrame.args[1].jsValue();
2211
2212 if (src1.isNumber() && src2.isNumber())
2213 return JSValue::encode(jsNumber(src1.asNumber() * src2.asNumber()));
2214
2215 CallFrame* callFrame = stackFrame.callFrame;
2216 JSValue result = jsNumber(src1.toNumber(callFrame) * src2.toNumber(callFrame));
2217 CHECK_FOR_EXCEPTION_AT_END();
2218 return JSValue::encode(result);
2219 }
2220
2221 DEFINE_STUB_FUNCTION(JSObject*, op_new_func)
2222 {
2223 STUB_INIT_STACK_FRAME(stackFrame);
2224
2225 ASSERT(stackFrame.callFrame->codeBlock()->codeType() != FunctionCode || !stackFrame.callFrame->codeBlock()->needsFullScopeChain() || stackFrame.callFrame->uncheckedR(stackFrame.callFrame->codeBlock()->activationRegister()).jsValue());
2226 return JSFunction::create(stackFrame.callFrame, stackFrame.args[0].function(), stackFrame.callFrame->scope());
2227 }
2228
2229 inline void* jitCompileFor(CallFrame* callFrame, CodeSpecializationKind kind)
2230 {
2231 // This function is called by cti_op_call_jitCompile() and
2232 // cti_op_construct_jitCompile() JIT glue trampolines to compile the
2233 // callee function that we want to call. Both cti glue trampolines are
2234 // called by JIT'ed code which has pushed a frame and initialized most of
2235 // the frame content except for the codeBlock.
2236 //
2237 // Normally, the prologue of the callee is supposed to set the frame's cb
2238 // pointer to the cb of the callee. But in this case, the callee code does
2239 // not exist until it is compiled below. The compilation process will
2240 // allocate memory which may trigger a GC. The GC, in turn, will scan the
2241 // JSStack, and will expect the frame's cb to either be valid or 0. If
2242 // we don't initialize it, the GC will be accessing invalid memory and may
2243 // crash.
2244 //
2245 // Hence, we should nullify it here before proceeding with the compilation.
2246 callFrame->setCodeBlock(0);
2247
2248 JSFunction* function = jsCast<JSFunction*>(callFrame->callee());
2249 ASSERT(!function->isHostFunction());
2250 FunctionExecutable* executable = function->jsExecutable();
2251 JSScope* callDataScopeChain = function->scope();
2252 JSObject* error = executable->compileFor(callFrame, callDataScopeChain, kind);
2253 if (!error)
2254 return function;
2255 callFrame->vm().exception = error;
2256 return 0;
2257 }
2258
2259 DEFINE_STUB_FUNCTION(void*, op_call_jitCompile)
2260 {
2261 STUB_INIT_STACK_FRAME(stackFrame);
2262
2263 #if !ASSERT_DISABLED
2264 CallData callData;
2265 ASSERT(stackFrame.callFrame->callee()->methodTable()->getCallData(stackFrame.callFrame->callee(), callData) == CallTypeJS);
2266 #endif
2267
2268 CallFrame* callFrame = stackFrame.callFrame;
2269 void* result = jitCompileFor(callFrame, CodeForCall);
2270 if (!result)
2271 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
2272
2273 return result;
2274 }
2275
2276 DEFINE_STUB_FUNCTION(void*, op_construct_jitCompile)
2277 {
2278 STUB_INIT_STACK_FRAME(stackFrame);
2279
2280 #if !ASSERT_DISABLED
2281 ConstructData constructData;
2282 ASSERT(jsCast<JSFunction*>(stackFrame.callFrame->callee())->methodTable()->getConstructData(stackFrame.callFrame->callee(), constructData) == ConstructTypeJS);
2283 #endif
2284
2285 CallFrame* callFrame = stackFrame.callFrame;
2286 void* result = jitCompileFor(callFrame, CodeForConstruct);
2287 if (!result)
2288 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
2289
2290 return result;
2291 }
2292
2293 DEFINE_STUB_FUNCTION(void*, op_call_arityCheck)
2294 {
2295 STUB_INIT_STACK_FRAME(stackFrame);
2296
2297 CallFrame* callFrame = stackFrame.callFrame;
2298
2299 CallFrame* newCallFrame = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForCall);
2300 if (!newCallFrame)
2301 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createStackOverflowError(callFrame->callerFrame()));
2302
2303 return newCallFrame;
2304 }
2305
2306 DEFINE_STUB_FUNCTION(void*, op_construct_arityCheck)
2307 {
2308 STUB_INIT_STACK_FRAME(stackFrame);
2309
2310 CallFrame* callFrame = stackFrame.callFrame;
2311
2312 CallFrame* newCallFrame = CommonSlowPaths::arityCheckFor(callFrame, stackFrame.stack, CodeForConstruct);
2313 if (!newCallFrame)
2314 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createStackOverflowError(callFrame->callerFrame()));
2315
2316 return newCallFrame;
2317 }
2318
2319 inline void* lazyLinkFor(CallFrame* callFrame, CodeSpecializationKind kind)
2320 {
2321 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
2322 ExecutableBase* executable = callee->executable();
2323
2324 MacroAssemblerCodePtr codePtr;
2325 CodeBlock* codeBlock = 0;
2326 CallLinkInfo* callLinkInfo = &callFrame->callerFrame()->codeBlock()->getCallLinkInfo(callFrame->returnPC());
2327
2328 // This function is called by cti_vm_lazyLinkCall() and
2329 // cti_lazyLinkConstruct JIT glue trampolines to link the callee function
2330 // that we want to call. Both cti glue trampolines are called by JIT'ed
2331 // code which has pushed a frame and initialized most of the frame content
2332 // except for the codeBlock.
2333 //
2334 // Normally, the prologue of the callee is supposed to set the frame's cb
2335 // field to the cb of the callee. But in this case, the callee may not
2336 // exist yet, and if not, it will be generated in the compilation below.
2337 // The compilation will allocate memory which may trigger a GC. The GC, in
2338 // turn, will scan the JSStack, and will expect the frame's cb to be valid
2339 // or 0. If we don't initialize it, the GC will be accessing invalid
2340 // memory and may crash.
2341 //
2342 // Hence, we should nullify it here before proceeding with the compilation.
2343 callFrame->setCodeBlock(0);
2344
2345 if (executable->isHostFunction())
2346 codePtr = executable->generatedJITCodeFor(kind).addressForCall();
2347 else {
2348 FunctionExecutable* functionExecutable = static_cast<FunctionExecutable*>(executable);
2349 if (JSObject* error = functionExecutable->compileFor(callFrame, callee->scope(), kind)) {
2350 callFrame->vm().exception = error;
2351 return 0;
2352 }
2353 codeBlock = &functionExecutable->generatedBytecodeFor(kind);
2354 if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters())
2355 || callLinkInfo->callType == CallLinkInfo::CallVarargs)
2356 codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(kind);
2357 else
2358 codePtr = functionExecutable->generatedJITCodeFor(kind).addressForCall();
2359 }
2360
2361 if (!callLinkInfo->seenOnce())
2362 callLinkInfo->setSeen();
2363 else
2364 JIT::linkFor(callee, callFrame->callerFrame()->codeBlock(), codeBlock, codePtr, callLinkInfo, &callFrame->vm(), kind);
2365
2366 return codePtr.executableAddress();
2367 }
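
// Linking policy in lazyLinkFor: JS callees that receive fewer arguments than they declare, and
// CallVarargs call sites, are sent to the arity-check entry point; host functions and
// arity-matching JS calls get the direct entry point. As with the property caches, the first call
// through here only marks the CallLinkInfo as seen; the call site is hard-linked on the second call.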
2368
2369 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkCall)
2370 {
2371 STUB_INIT_STACK_FRAME(stackFrame);
2372
2373 CallFrame* callFrame = stackFrame.callFrame;
2374 void* result = lazyLinkFor(callFrame, CodeForCall);
2375 if (!result)
2376 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
2377
2378 return result;
2379 }
2380
2381 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkClosureCall)
2382 {
2383 STUB_INIT_STACK_FRAME(stackFrame);
2384
2385 CallFrame* callFrame = stackFrame.callFrame;
2386
2387 CodeBlock* callerCodeBlock = callFrame->callerFrame()->codeBlock();
2388 VM* vm = callerCodeBlock->vm();
2389 CallLinkInfo* callLinkInfo = &callerCodeBlock->getCallLinkInfo(callFrame->returnPC());
2390 JSFunction* callee = jsCast<JSFunction*>(callFrame->callee());
2391 ExecutableBase* executable = callee->executable();
2392 Structure* structure = callee->structure();
2393
2394 ASSERT(callLinkInfo->callType == CallLinkInfo::Call);
2395 ASSERT(callLinkInfo->isLinked());
2396 ASSERT(callLinkInfo->callee);
2397 ASSERT(callee != callLinkInfo->callee.get());
2398
2399 bool shouldLink = false;
2400 CodeBlock* calleeCodeBlock = 0;
2401 MacroAssemblerCodePtr codePtr;
2402
2403 if (executable == callLinkInfo->callee.get()->executable()
2404 && structure == callLinkInfo->callee.get()->structure()) {
2405
2406 shouldLink = true;
2407
2408 ASSERT(executable->hasJITCodeForCall());
2409 codePtr = executable->generatedJITCodeForCall().addressForCall();
2410 if (!callee->executable()->isHostFunction()) {
2411 calleeCodeBlock = &jsCast<FunctionExecutable*>(executable)->generatedBytecodeForCall();
2412 if (callFrame->argumentCountIncludingThis() < static_cast<size_t>(calleeCodeBlock->numParameters())) {
2413 shouldLink = false;
2414 codePtr = executable->generatedJITCodeWithArityCheckFor(CodeForCall);
2415 }
2416 }
2417 } else if (callee->isHostFunction())
2418 codePtr = executable->generatedJITCodeForCall().addressForCall();
2419 else {
2420 // Need to clear the code block before compilation, because compilation can GC.
2421 callFrame->setCodeBlock(0);
2422
2423 FunctionExecutable* functionExecutable = jsCast<FunctionExecutable*>(executable);
2424 JSScope* scopeChain = callee->scope();
2425 JSObject* error = functionExecutable->compileFor(callFrame, scopeChain, CodeForCall);
2426 if (error) {
2427 callFrame->vm().exception = error;
2428 return 0;
2429 }
2430
2431 codePtr = functionExecutable->generatedJITCodeWithArityCheckFor(CodeForCall);
2432 }
2433
2434 if (shouldLink) {
2435 ASSERT(codePtr);
2436 JIT::compileClosureCall(vm, callLinkInfo, callerCodeBlock, calleeCodeBlock, structure, executable, codePtr);
2437 callLinkInfo->hasSeenClosure = true;
2438 } else
2439 JIT::linkSlowCall(callerCodeBlock, callLinkInfo);
2440
2441 return codePtr.executableAddress();
2442 }
2443
2444 DEFINE_STUB_FUNCTION(void*, vm_lazyLinkConstruct)
2445 {
2446 STUB_INIT_STACK_FRAME(stackFrame);
2447
2448 CallFrame* callFrame = stackFrame.callFrame;
2449 void* result = lazyLinkFor(callFrame, CodeForConstruct);
2450 if (!result)
2451 return throwExceptionFromOpCall<void*>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
2452
2453 return result;
2454 }
2455
2456 DEFINE_STUB_FUNCTION(JSObject*, op_push_activation)
2457 {
2458 STUB_INIT_STACK_FRAME(stackFrame);
2459
2460 JSActivation* activation = JSActivation::create(stackFrame.callFrame->vm(), stackFrame.callFrame, stackFrame.callFrame->codeBlock());
2461 stackFrame.callFrame->setScope(activation);
2462 return activation;
2463 }
2464
2465 DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_NotJSFunction)
2466 {
2467 STUB_INIT_STACK_FRAME(stackFrame);
2468
2469 CallFrame* callFrame = stackFrame.callFrame;
2470
2471 JSValue callee = callFrame->calleeAsValue();
2472
2473 CallData callData;
2474 CallType callType = getCallData(callee, callData);
2475
2476 ASSERT(callType != CallTypeJS);
2477 if (callType != CallTypeHost) {
2478 ASSERT(callType == CallTypeNone);
2479 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createNotAFunctionError(callFrame->callerFrame(), callee));
2480 }
2481
2482 EncodedJSValue returnValue;
2483 {
2484 SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
2485 returnValue = callData.native.function(callFrame);
2486 }
2487
2488 if (stackFrame.vm->exception)
2489 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
2490
2491 return returnValue;
2492 }
2493
2494 DEFINE_STUB_FUNCTION(EncodedJSValue, op_create_arguments)
2495 {
2496 STUB_INIT_STACK_FRAME(stackFrame);
2497
2498 Arguments* arguments = Arguments::create(*stackFrame.vm, stackFrame.callFrame);
2499 return JSValue::encode(JSValue(arguments));
2500 }
2501
2502 DEFINE_STUB_FUNCTION(void, op_tear_off_activation)
2503 {
2504 STUB_INIT_STACK_FRAME(stackFrame);
2505
2506 ASSERT(stackFrame.callFrame->codeBlock()->needsFullScopeChain());
2507 jsCast<JSActivation*>(stackFrame.args[0].jsValue())->tearOff(*stackFrame.vm);
2508 }
2509
2510 DEFINE_STUB_FUNCTION(void, op_tear_off_arguments)
2511 {
2512 STUB_INIT_STACK_FRAME(stackFrame);
2513
2514 CallFrame* callFrame = stackFrame.callFrame;
2515 ASSERT(callFrame->codeBlock()->usesArguments());
2516 Arguments* arguments = jsCast<Arguments*>(stackFrame.args[0].jsValue());
2517 if (JSValue activationValue = stackFrame.args[1].jsValue()) {
2518 arguments->didTearOffActivation(callFrame, jsCast<JSActivation*>(activationValue));
2519 return;
2520 }
2521 arguments->tearOff(callFrame);
2522 }
2523
2524 DEFINE_STUB_FUNCTION(void, op_profile_will_call)
2525 {
2526 STUB_INIT_STACK_FRAME(stackFrame);
2527
2528 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
2529 profiler->willExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
2530 }
2531
2532 DEFINE_STUB_FUNCTION(void, op_profile_did_call)
2533 {
2534 STUB_INIT_STACK_FRAME(stackFrame);
2535
2536 if (LegacyProfiler* profiler = stackFrame.vm->enabledProfiler())
2537 profiler->didExecute(stackFrame.callFrame, stackFrame.args[0].jsValue());
2538 }
2539
2540 DEFINE_STUB_FUNCTION(JSObject*, op_new_array)
2541 {
2542 STUB_INIT_STACK_FRAME(stackFrame);
2543
2544 return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), reinterpret_cast<JSValue*>(&stackFrame.callFrame->registers()[stackFrame.args[0].int32()]), stackFrame.args[1].int32());
2545 }
2546
2547 DEFINE_STUB_FUNCTION(JSObject*, op_new_array_with_size)
2548 {
2549 STUB_INIT_STACK_FRAME(stackFrame);
2550
2551 return constructArrayWithSizeQuirk(stackFrame.callFrame, stackFrame.args[1].arrayAllocationProfile(), stackFrame.callFrame->lexicalGlobalObject(), stackFrame.args[0].jsValue());
2552 }
2553
2554 DEFINE_STUB_FUNCTION(JSObject*, op_new_array_buffer)
2555 {
2556 STUB_INIT_STACK_FRAME(stackFrame);
2557
2558 return constructArray(stackFrame.callFrame, stackFrame.args[2].arrayAllocationProfile(), stackFrame.callFrame->codeBlock()->constantBuffer(stackFrame.args[0].int32()), stackFrame.args[1].int32());
2559 }
2560
2561 DEFINE_STUB_FUNCTION(void, op_init_global_const_check)
2562 {
2563 STUB_INIT_STACK_FRAME(stackFrame);
2564
2565 CallFrame* callFrame = stackFrame.callFrame;
2566 CodeBlock* codeBlock = callFrame->codeBlock();
2567 symbolTablePut(codeBlock->globalObject(), callFrame, codeBlock->identifier(stackFrame.args[1].int32()), stackFrame.args[0].jsValue(), true);
2568 }
2569
2570 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve)
2571 {
2572 STUB_INIT_STACK_FRAME(stackFrame);
2573
2574 CallFrame* callFrame = stackFrame.callFrame;
2575
2576 JSValue result = JSScope::resolve(callFrame, stackFrame.args[0].identifier(), stackFrame.args[1].resolveOperations());
2577 CHECK_FOR_EXCEPTION_AT_END();
2578 return JSValue::encode(result);
2579 }
2580
2581 DEFINE_STUB_FUNCTION(void, op_put_to_base)
2582 {
2583 STUB_INIT_STACK_FRAME(stackFrame);
2584
2585 CallFrame* callFrame = stackFrame.callFrame;
2586 JSValue base = callFrame->r(stackFrame.args[0].int32()).jsValue();
2587 JSValue value = callFrame->r(stackFrame.args[2].int32()).jsValue();
2588 JSScope::resolvePut(callFrame, base, stackFrame.args[1].identifier(), value, stackFrame.args[3].putToBaseOperation());
2589 CHECK_FOR_EXCEPTION_AT_END();
2590 }
2591
2592 DEFINE_STUB_FUNCTION(EncodedJSValue, op_construct_NotJSConstruct)
2593 {
2594 STUB_INIT_STACK_FRAME(stackFrame);
2595
2596 CallFrame* callFrame = stackFrame.callFrame;
2597 JSValue callee = callFrame->calleeAsValue();
2598
2599 ConstructData constructData;
2600 ConstructType constructType = getConstructData(callee, constructData);
2601
2602 ASSERT(constructType != ConstructTypeJS);
2603 if (constructType != ConstructTypeHost) {
2604 ASSERT(constructType == ConstructTypeNone);
2605 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS, createNotAConstructorError(callFrame->callerFrame(), callee));
2606 }
2607
2608 EncodedJSValue returnValue;
2609 {
2610 SamplingTool::CallRecord callRecord(CTI_SAMPLER, true);
2611 returnValue = constructData.native.function(callFrame);
2612 }
2613
2614 if (stackFrame.vm->exception)
2615 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
2616
2617 return returnValue;
2618 }
2619
2620 static JSValue getByVal(
2621 CallFrame* callFrame, JSValue baseValue, JSValue subscript, ReturnAddressPtr returnAddress)
2622 {
2623 if (LIKELY(baseValue.isCell() && subscript.isString())) {
2624 if (JSValue result = baseValue.asCell()->fastGetOwnProperty(callFrame, asString(subscript)->value(callFrame)))
2625 return result;
2626 }
2627
2628 if (subscript.isUInt32()) {
2629 uint32_t i = subscript.asUInt32();
2630 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i)) {
2631 ctiPatchCallByReturnAddress(callFrame->codeBlock(), returnAddress, FunctionPtr(cti_op_get_by_val_string));
2632 return asString(baseValue)->getIndex(callFrame, i);
2633 }
2634 return baseValue.get(callFrame, i);
2635 }
2636
2637 if (isName(subscript))
2638 return baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
2639
2640 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
2641 return baseValue.get(callFrame, property);
2642 }
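
// getByVal dispatch order: (1) a cell base with a string subscript tries fastGetOwnProperty first;
// (2) a uint32 subscript takes the indexed path, repatching the caller to cti_op_get_by_val_string
// when the base is a string with an in-range index; (3) private names use the NameInstance lookup;
// (4) anything else is coerced to an Identifier and looked up generically.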
2643
2644 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val)
2645 {
2646 STUB_INIT_STACK_FRAME(stackFrame);
2647
2648 CallFrame* callFrame = stackFrame.callFrame;
2649
2650 JSValue baseValue = stackFrame.args[0].jsValue();
2651 JSValue subscript = stackFrame.args[1].jsValue();
2652
2653 if (baseValue.isObject() && subscript.isInt32()) {
2654 // See if it's worth optimizing this at all.
2655 JSObject* object = asObject(baseValue);
2656 bool didOptimize = false;
2657
2658 unsigned bytecodeOffset = callFrame->bytecodeOffsetForNonDFGCode();
2659 ASSERT(bytecodeOffset);
2660 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
2661 ASSERT(!byValInfo.stubRoutine);
2662
2663 if (hasOptimizableIndexing(object->structure())) {
2664 // Attempt to optimize.
2665 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
2666 if (arrayMode != byValInfo.arrayMode) {
2667 JIT::compileGetByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
2668 didOptimize = true;
2669 }
2670 }
2671
2672 if (!didOptimize) {
2673 // If we take the slow path more than 10 times without patching then make sure we
2674 // never make that mistake again. Or, if we failed to patch and we have some object
2675 // that intercepts indexed get, then don't even wait until 10 times. For cases
2676 // where we see non-index-intercepting objects, this gives 10 iterations worth of
2677 // opportunity for us to observe that the get_by_val may be polymorphic.
2678 if (++byValInfo.slowPathCount >= 10
2679 || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
2680 // Don't ever try to optimize.
2681 RepatchBuffer repatchBuffer(callFrame->codeBlock());
2682 repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_generic));
2683 }
2684 }
2685 }
2686
2687 JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
2688 CHECK_FOR_EXCEPTION();
2689 return JSValue::encode(result);
2690 }
2691
2692 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_generic)
2693 {
2694 STUB_INIT_STACK_FRAME(stackFrame);
2695
2696 CallFrame* callFrame = stackFrame.callFrame;
2697
2698 JSValue baseValue = stackFrame.args[0].jsValue();
2699 JSValue subscript = stackFrame.args[1].jsValue();
2700
2701 JSValue result = getByVal(callFrame, baseValue, subscript, STUB_RETURN_ADDRESS);
2702 CHECK_FOR_EXCEPTION();
2703 return JSValue::encode(result);
2704 }
2705
2706 DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_val_string)
2707 {
2708 STUB_INIT_STACK_FRAME(stackFrame);
2709
2710 CallFrame* callFrame = stackFrame.callFrame;
2711
2712 JSValue baseValue = stackFrame.args[0].jsValue();
2713 JSValue subscript = stackFrame.args[1].jsValue();
2714
2715 JSValue result;
2716
2717 if (LIKELY(subscript.isUInt32())) {
2718 uint32_t i = subscript.asUInt32();
2719 if (isJSString(baseValue) && asString(baseValue)->canGetIndex(i))
2720 result = asString(baseValue)->getIndex(callFrame, i);
2721 else {
2722 result = baseValue.get(callFrame, i);
2723 if (!isJSString(baseValue))
2724 ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val));
2725 }
2726 } else if (isName(subscript))
2727 result = baseValue.get(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
2728 else {
2729 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
2730 result = baseValue.get(callFrame, property);
2731 }
2732
2733 CHECK_FOR_EXCEPTION_AT_END();
2734 return JSValue::encode(result);
2735 }
2736
2737 DEFINE_STUB_FUNCTION(EncodedJSValue, op_sub)
2738 {
2739 STUB_INIT_STACK_FRAME(stackFrame);
2740
2741 JSValue src1 = stackFrame.args[0].jsValue();
2742 JSValue src2 = stackFrame.args[1].jsValue();
2743
2744 if (src1.isNumber() && src2.isNumber())
2745 return JSValue::encode(jsNumber(src1.asNumber() - src2.asNumber()));
2746
2747 CallFrame* callFrame = stackFrame.callFrame;
2748 JSValue result = jsNumber(src1.toNumber(callFrame) - src2.toNumber(callFrame));
2749 CHECK_FOR_EXCEPTION_AT_END();
2750 return JSValue::encode(result);
2751 }
2752
2753 static void putByVal(CallFrame* callFrame, JSValue baseValue, JSValue subscript, JSValue value)
2754 {
2755 if (LIKELY(subscript.isUInt32())) {
2756 uint32_t i = subscript.asUInt32();
2757 if (baseValue.isObject()) {
2758 JSObject* object = asObject(baseValue);
2759 if (object->canSetIndexQuickly(i))
2760 object->setIndexQuickly(callFrame->vm(), i, value);
2761 else
2762 object->methodTable()->putByIndex(object, callFrame, i, value, callFrame->codeBlock()->isStrictMode());
2763 } else
2764 baseValue.putByIndex(callFrame, i, value, callFrame->codeBlock()->isStrictMode());
2765 } else if (isName(subscript)) {
2766 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
2767 baseValue.put(callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName(), value, slot);
2768 } else {
2769 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
2770 if (!callFrame->vm().exception) { // Don't put to an object if toString threw an exception.
2771 PutPropertySlot slot(callFrame->codeBlock()->isStrictMode());
2772 baseValue.put(callFrame, property, value, slot);
2773 }
2774 }
2775 }
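
// putByVal mirrors getByVal: indexed stores use setIndexQuickly when the object can accept the
// index and the method table's putByIndex otherwise; private names and generic identifiers go
// through a PutPropertySlot. If the subscript's toString throws, the store is skipped so the
// original exception is not clobbered.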
2776
2777 DEFINE_STUB_FUNCTION(void, op_put_by_val)
2778 {
2779 STUB_INIT_STACK_FRAME(stackFrame);
2780
2781 CallFrame* callFrame = stackFrame.callFrame;
2782
2783 JSValue baseValue = stackFrame.args[0].jsValue();
2784 JSValue subscript = stackFrame.args[1].jsValue();
2785 JSValue value = stackFrame.args[2].jsValue();
2786
2787 if (baseValue.isObject() && subscript.isInt32()) {
2788 // See if it's worth optimizing at all.
2789 JSObject* object = asObject(baseValue);
2790 bool didOptimize = false;
2791
2792 unsigned bytecodeOffset = callFrame->bytecodeOffsetForNonDFGCode();
2793 ASSERT(bytecodeOffset);
2794 ByValInfo& byValInfo = callFrame->codeBlock()->getByValInfo(bytecodeOffset - 1);
2795 ASSERT(!byValInfo.stubRoutine);
2796
2797 if (hasOptimizableIndexing(object->structure())) {
2798 // Attempt to optimize.
2799 JITArrayMode arrayMode = jitArrayModeForStructure(object->structure());
2800 if (arrayMode != byValInfo.arrayMode) {
2801 JIT::compilePutByVal(&callFrame->vm(), callFrame->codeBlock(), &byValInfo, STUB_RETURN_ADDRESS, arrayMode);
2802 didOptimize = true;
2803 }
2804 }
2805
2806 if (!didOptimize) {
2807 // If we take the slow path more than 10 times without patching then make sure we
2808 // never make that mistake again. Or, if we failed to patch and we have some object
2809 // that intercepts indexed get, then don't even wait until 10 times. For cases
2810 // where we see non-index-intercepting objects, this gives 10 iterations worth of
2811 // opportunity for us to observe that the put_by_val may be polymorphic.
2812 if (++byValInfo.slowPathCount >= 10
2813 || object->structure()->typeInfo().interceptsGetOwnPropertySlotByIndexEvenWhenLengthIsNotZero()) {
2814 // Don't ever try to optimize.
2815 RepatchBuffer repatchBuffer(callFrame->codeBlock());
2816 repatchBuffer.relinkCallerToFunction(STUB_RETURN_ADDRESS, FunctionPtr(cti_op_put_by_val_generic));
2817 }
2818 }
2819 }
2820
2821 putByVal(callFrame, baseValue, subscript, value);
2822
2823 CHECK_FOR_EXCEPTION_AT_END();
2824 }
2825
2826 DEFINE_STUB_FUNCTION(void, op_put_by_val_generic)
2827 {
2828 STUB_INIT_STACK_FRAME(stackFrame);
2829
2830 CallFrame* callFrame = stackFrame.callFrame;
2831
2832 JSValue baseValue = stackFrame.args[0].jsValue();
2833 JSValue subscript = stackFrame.args[1].jsValue();
2834 JSValue value = stackFrame.args[2].jsValue();
2835
2836 putByVal(callFrame, baseValue, subscript, value);
2837
2838 CHECK_FOR_EXCEPTION_AT_END();
2839 }
2840
2841 DEFINE_STUB_FUNCTION(EncodedJSValue, op_less)
2842 {
2843 STUB_INIT_STACK_FRAME(stackFrame);
2844
2845 CallFrame* callFrame = stackFrame.callFrame;
2846 JSValue result = jsBoolean(jsLess<true>(callFrame, stackFrame.args[0].jsValue(), stackFrame.args[1].jsValue()));
2847 CHECK_FOR_EXCEPTION_AT_END();
2848 return JSValue::encode(result);
2849 }
2850
2851 DEFINE_STUB_FUNCTION(EncodedJSValue, op_lesseq)
2852 {
2853 STUB_INIT_STACK_FRAME(stackFrame);
2854
2855 CallFrame* callFrame = stackFrame.callFrame;
2856 JSValue result = jsBoolean(jsLessEq<true>(callFrame, stackFrame.args[0].jsValue(), stackFrame.args[1].jsValue()));
2857 CHECK_FOR_EXCEPTION_AT_END();
2858 return JSValue::encode(result);
2859 }
2860
2861 DEFINE_STUB_FUNCTION(EncodedJSValue, op_greater)
2862 {
2863 STUB_INIT_STACK_FRAME(stackFrame);
2864
2865 CallFrame* callFrame = stackFrame.callFrame;
2866 JSValue result = jsBoolean(jsLess<false>(callFrame, stackFrame.args[1].jsValue(), stackFrame.args[0].jsValue()));
2867 CHECK_FOR_EXCEPTION_AT_END();
2868 return JSValue::encode(result);
2869 }
2870
2871 DEFINE_STUB_FUNCTION(EncodedJSValue, op_greatereq)
2872 {
2873 STUB_INIT_STACK_FRAME(stackFrame);
2874
2875 CallFrame* callFrame = stackFrame.callFrame;
2876 JSValue result = jsBoolean(jsLessEq<false>(callFrame, stackFrame.args[1].jsValue(), stackFrame.args[0].jsValue()));
2877 CHECK_FOR_EXCEPTION_AT_END();
2878 return JSValue::encode(result);
2879 }
2880
2881 DEFINE_STUB_FUNCTION(void*, op_load_varargs)
2882 {
2883 STUB_INIT_STACK_FRAME(stackFrame);
2884
2885 CallFrame* callFrame = stackFrame.callFrame;
2886 JSStack* stack = stackFrame.stack;
2887 JSValue thisValue = stackFrame.args[0].jsValue();
2888 JSValue arguments = stackFrame.args[1].jsValue();
2889 int firstFreeRegister = stackFrame.args[2].int32();
2890
2891 CallFrame* newCallFrame = loadVarargs(callFrame, stack, thisValue, arguments, firstFreeRegister);
2892 if (!newCallFrame)
2893 VM_THROW_EXCEPTION();
2894 return newCallFrame;
2895 }
2896
2897 DEFINE_STUB_FUNCTION(EncodedJSValue, op_negate)
2898 {
2899 STUB_INIT_STACK_FRAME(stackFrame);
2900
2901 JSValue src = stackFrame.args[0].jsValue();
2902
2903 if (src.isNumber())
2904 return JSValue::encode(jsNumber(-src.asNumber()));
2905
2906 CallFrame* callFrame = stackFrame.callFrame;
2907 JSValue result = jsNumber(-src.toNumber(callFrame));
2908 CHECK_FOR_EXCEPTION_AT_END();
2909 return JSValue::encode(result);
2910 }
2911
2912 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_base)
2913 {
2914 STUB_INIT_STACK_FRAME(stackFrame);
2915
2916 return JSValue::encode(JSScope::resolveBase(stackFrame.callFrame, stackFrame.args[0].identifier(), false, stackFrame.args[1].resolveOperations(), stackFrame.args[2].putToBaseOperation()));
2917 }
2918
2919 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_base_strict_put)
2920 {
2921 STUB_INIT_STACK_FRAME(stackFrame);
2922
2923 if (JSValue result = JSScope::resolveBase(stackFrame.callFrame, stackFrame.args[0].identifier(), true, stackFrame.args[1].resolveOperations(), stackFrame.args[2].putToBaseOperation()))
2924 return JSValue::encode(result);
2925 VM_THROW_EXCEPTION();
2926 }
2927
2928 DEFINE_STUB_FUNCTION(EncodedJSValue, op_div)
2929 {
2930 STUB_INIT_STACK_FRAME(stackFrame);
2931
2932 JSValue src1 = stackFrame.args[0].jsValue();
2933 JSValue src2 = stackFrame.args[1].jsValue();
2934
2935 if (src1.isNumber() && src2.isNumber())
2936 return JSValue::encode(jsNumber(src1.asNumber() / src2.asNumber()));
2937
2938 CallFrame* callFrame = stackFrame.callFrame;
2939 JSValue result = jsNumber(src1.toNumber(callFrame) / src2.toNumber(callFrame));
2940 CHECK_FOR_EXCEPTION_AT_END();
2941 return JSValue::encode(result);
2942 }
2943
2944 DEFINE_STUB_FUNCTION(EncodedJSValue, op_dec)
2945 {
2946 STUB_INIT_STACK_FRAME(stackFrame);
2947
2948 JSValue v = stackFrame.args[0].jsValue();
2949
2950 CallFrame* callFrame = stackFrame.callFrame;
2951 JSValue result = jsNumber(v.toNumber(callFrame) - 1);
2952 CHECK_FOR_EXCEPTION_AT_END();
2953 return JSValue::encode(result);
2954 }
2955
2956 DEFINE_STUB_FUNCTION(int, op_jless)
2957 {
2958 STUB_INIT_STACK_FRAME(stackFrame);
2959
2960 JSValue src1 = stackFrame.args[0].jsValue();
2961 JSValue src2 = stackFrame.args[1].jsValue();
2962 CallFrame* callFrame = stackFrame.callFrame;
2963
2964 bool result = jsLess<true>(callFrame, src1, src2);
2965 CHECK_FOR_EXCEPTION_AT_END();
2966 return result;
2967 }
2968
2969 DEFINE_STUB_FUNCTION(int, op_jlesseq)
2970 {
2971 STUB_INIT_STACK_FRAME(stackFrame);
2972
2973 JSValue src1 = stackFrame.args[0].jsValue();
2974 JSValue src2 = stackFrame.args[1].jsValue();
2975 CallFrame* callFrame = stackFrame.callFrame;
2976
2977 bool result = jsLessEq<true>(callFrame, src1, src2);
2978 CHECK_FOR_EXCEPTION_AT_END();
2979 return result;
2980 }
2981
2982 DEFINE_STUB_FUNCTION(int, op_jgreater)
2983 {
2984 STUB_INIT_STACK_FRAME(stackFrame);
2985
2986 JSValue src1 = stackFrame.args[0].jsValue();
2987 JSValue src2 = stackFrame.args[1].jsValue();
2988 CallFrame* callFrame = stackFrame.callFrame;
2989
2990 bool result = jsLess<false>(callFrame, src2, src1);
2991 CHECK_FOR_EXCEPTION_AT_END();
2992 return result;
2993 }
2994
2995 DEFINE_STUB_FUNCTION(int, op_jgreatereq)
2996 {
2997 STUB_INIT_STACK_FRAME(stackFrame);
2998
2999 JSValue src1 = stackFrame.args[0].jsValue();
3000 JSValue src2 = stackFrame.args[1].jsValue();
3001 CallFrame* callFrame = stackFrame.callFrame;
3002
3003 bool result = jsLessEq<false>(callFrame, src2, src1);
3004 CHECK_FOR_EXCEPTION_AT_END();
3005 return result;
3006 }
3007
3008 DEFINE_STUB_FUNCTION(EncodedJSValue, op_not)
3009 {
3010 STUB_INIT_STACK_FRAME(stackFrame);
3011
3012 JSValue src = stackFrame.args[0].jsValue();
3013
3014 JSValue result = jsBoolean(!src.toBoolean(stackFrame.callFrame));
3015 CHECK_FOR_EXCEPTION_AT_END();
3016 return JSValue::encode(result);
3017 }
3018
3019 DEFINE_STUB_FUNCTION(int, op_jtrue)
3020 {
3021 STUB_INIT_STACK_FRAME(stackFrame);
3022
3023 JSValue src1 = stackFrame.args[0].jsValue();
3024
3025 bool result = src1.toBoolean(stackFrame.callFrame);
3026 CHECK_FOR_EXCEPTION_AT_END();
3027 return result;
3028 }
3029
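// Slow path for abstract equality (==). On JSVALUE32_64 the comparison is open-coded below:
// mismatched-tag cases are resolved directly, and the 'goto start' loops re-run the comparison
// after an object operand has been converted with toPrimitive(). Other value representations
// defer to JSValue::equalSlowCaseInline().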
3030 DEFINE_STUB_FUNCTION(int, op_eq)
3031 {
3032 STUB_INIT_STACK_FRAME(stackFrame);
3033
3034 JSValue src1 = stackFrame.args[0].jsValue();
3035 JSValue src2 = stackFrame.args[1].jsValue();
3036
3037 #if USE(JSVALUE32_64)
3038 start:
3039 if (src2.isUndefined()) {
3040 return src1.isNull()
3041 || (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
3042 || src1.isUndefined();
3043 }
3044
3045 if (src2.isNull()) {
3046 return src1.isUndefined()
3047 || (src1.isCell() && src1.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject()))
3048 || src1.isNull();
3049 }
3050
3051 if (src1.isInt32()) {
3052 if (src2.isDouble())
3053 return src1.asInt32() == src2.asDouble();
3054 double d = src2.toNumber(stackFrame.callFrame);
3055 CHECK_FOR_EXCEPTION();
3056 return src1.asInt32() == d;
3057 }
3058
3059 if (src1.isDouble()) {
3060 if (src2.isInt32())
3061 return src1.asDouble() == src2.asInt32();
3062 double d = src2.toNumber(stackFrame.callFrame);
3063 CHECK_FOR_EXCEPTION();
3064 return src1.asDouble() == d;
3065 }
3066
3067 if (src1.isTrue()) {
3068 if (src2.isFalse())
3069 return false;
3070 double d = src2.toNumber(stackFrame.callFrame);
3071 CHECK_FOR_EXCEPTION();
3072 return d == 1.0;
3073 }
3074
3075 if (src1.isFalse()) {
3076 if (src2.isTrue())
3077 return false;
3078 double d = src2.toNumber(stackFrame.callFrame);
3079 CHECK_FOR_EXCEPTION();
3080 return d == 0.0;
3081 }
3082
3083 if (src1.isUndefined())
3084 return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());
3085
3086 if (src1.isNull())
3087 return src2.isCell() && src2.asCell()->structure()->masqueradesAsUndefined(stackFrame.callFrame->lexicalGlobalObject());
3088
3089 JSCell* cell1 = src1.asCell();
3090
3091 if (cell1->isString()) {
3092 if (src2.isInt32())
3093 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asInt32();
3094
3095 if (src2.isDouble())
3096 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == src2.asDouble();
3097
3098 if (src2.isTrue())
3099 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 1.0;
3100
3101 if (src2.isFalse())
3102 return jsToNumber(jsCast<JSString*>(cell1)->value(stackFrame.callFrame)) == 0.0;
3103
3104 JSCell* cell2 = src2.asCell();
3105 if (cell2->isString())
3106 return jsCast<JSString*>(cell1)->value(stackFrame.callFrame) == jsCast<JSString*>(cell2)->value(stackFrame.callFrame);
3107
3108 src2 = asObject(cell2)->toPrimitive(stackFrame.callFrame);
3109 CHECK_FOR_EXCEPTION();
3110 goto start;
3111 }
3112
3113 if (src2.isObject())
3114 return asObject(cell1) == asObject(src2);
3115 src1 = asObject(cell1)->toPrimitive(stackFrame.callFrame);
3116 CHECK_FOR_EXCEPTION();
3117 goto start;
3118
3119 #else // USE(JSVALUE32_64)
3120 CallFrame* callFrame = stackFrame.callFrame;
3121
3122 bool result = JSValue::equalSlowCaseInline(callFrame, src1, src2);
3123 CHECK_FOR_EXCEPTION_AT_END();
3124 return result;
3125 #endif // USE(JSVALUE32_64)
3126 }
3127
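// Equality for two operands the JIT has already proven to be JSStrings; only the JSVALUE32_64
// JIT emits calls to this stub, hence the RELEASE_ASSERT on other builds.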
3128 DEFINE_STUB_FUNCTION(int, op_eq_strings)
3129 {
3130 #if USE(JSVALUE32_64)
3131 STUB_INIT_STACK_FRAME(stackFrame);
3132
3133 JSString* string1 = stackFrame.args[0].jsString();
3134 JSString* string2 = stackFrame.args[1].jsString();
3135
3136 ASSERT(string1->isString());
3137 ASSERT(string2->isString());
3138 return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
3139 #else
3140 UNUSED_PARAM(args);
3141 RELEASE_ASSERT_NOT_REACHED();
3142 return 0;
3143 #endif
3144 }
3145
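// The shift and bitwise stubs below share one shape: coerce the operands with toInt32()/toUInt32()
// (shift counts are masked to the low five bits, as the spec requires), box the numeric result,
// then check for a pending exception.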
3146 DEFINE_STUB_FUNCTION(EncodedJSValue, op_lshift)
3147 {
3148 STUB_INIT_STACK_FRAME(stackFrame);
3149
3150 JSValue val = stackFrame.args[0].jsValue();
3151 JSValue shift = stackFrame.args[1].jsValue();
3152
3153 CallFrame* callFrame = stackFrame.callFrame;
3154 JSValue result = jsNumber((val.toInt32(callFrame)) << (shift.toUInt32(callFrame) & 0x1f));
3155 CHECK_FOR_EXCEPTION_AT_END();
3156 return JSValue::encode(result);
3157 }
3158
3159 DEFINE_STUB_FUNCTION(EncodedJSValue, op_bitand)
3160 {
3161 STUB_INIT_STACK_FRAME(stackFrame);
3162
3163 JSValue src1 = stackFrame.args[0].jsValue();
3164 JSValue src2 = stackFrame.args[1].jsValue();
3165
3166 ASSERT(!src1.isInt32() || !src2.isInt32());
3167 CallFrame* callFrame = stackFrame.callFrame;
3168 JSValue result = jsNumber(src1.toInt32(callFrame) & src2.toInt32(callFrame));
3169 CHECK_FOR_EXCEPTION_AT_END();
3170 return JSValue::encode(result);
3171 }
3172
3173 DEFINE_STUB_FUNCTION(EncodedJSValue, op_rshift)
3174 {
3175 STUB_INIT_STACK_FRAME(stackFrame);
3176
3177 JSValue val = stackFrame.args[0].jsValue();
3178 JSValue shift = stackFrame.args[1].jsValue();
3179
3180 CallFrame* callFrame = stackFrame.callFrame;
3181 JSValue result = jsNumber((val.toInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
3182
3183 CHECK_FOR_EXCEPTION_AT_END();
3184 return JSValue::encode(result);
3185 }
3186
3187 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_with_base)
3188 {
3189 STUB_INIT_STACK_FRAME(stackFrame);
3190
3191 CallFrame* callFrame = stackFrame.callFrame;
3192 JSValue result = JSScope::resolveWithBase(callFrame, stackFrame.args[0].identifier(), &callFrame->registers()[stackFrame.args[1].int32()], stackFrame.args[2].resolveOperations(), stackFrame.args[3].putToBaseOperation());
3193 CHECK_FOR_EXCEPTION_AT_END();
3194 return JSValue::encode(result);
3195 }
3196
3197 DEFINE_STUB_FUNCTION(EncodedJSValue, op_resolve_with_this)
3198 {
3199 STUB_INIT_STACK_FRAME(stackFrame);
3200
3201 CallFrame* callFrame = stackFrame.callFrame;
3202 JSValue result = JSScope::resolveWithThis(callFrame, stackFrame.args[0].identifier(), &callFrame->registers()[stackFrame.args[1].int32()], stackFrame.args[2].resolveOperations());
3203 CHECK_FOR_EXCEPTION_AT_END();
3204 return JSValue::encode(result);
3205 }
3206
3207 DEFINE_STUB_FUNCTION(JSObject*, op_new_func_exp)
3208 {
3209 STUB_INIT_STACK_FRAME(stackFrame);
3210 CallFrame* callFrame = stackFrame.callFrame;
3211
3212 FunctionExecutable* function = stackFrame.args[0].function();
3213 JSFunction* func = JSFunction::create(callFrame, function, callFrame->scope());
3214 ASSERT(callFrame->codeBlock()->codeType() != FunctionCode || !callFrame->codeBlock()->needsFullScopeChain() || callFrame->uncheckedR(callFrame->codeBlock()->activationRegister()).jsValue());
3215
3216 return func;
3217 }
3218
3219 DEFINE_STUB_FUNCTION(EncodedJSValue, op_mod)
3220 {
3221 STUB_INIT_STACK_FRAME(stackFrame);
3222
3223 JSValue dividendValue = stackFrame.args[0].jsValue();
3224 JSValue divisorValue = stackFrame.args[1].jsValue();
3225
3226 CallFrame* callFrame = stackFrame.callFrame;
3227 double d = dividendValue.toNumber(callFrame);
3228 JSValue result = jsNumber(fmod(d, divisorValue.toNumber(callFrame)));
3229 CHECK_FOR_EXCEPTION_AT_END();
3230 return JSValue::encode(result);
3231 }
3232
3233 DEFINE_STUB_FUNCTION(EncodedJSValue, op_urshift)
3234 {
3235 STUB_INIT_STACK_FRAME(stackFrame);
3236
3237 JSValue val = stackFrame.args[0].jsValue();
3238 JSValue shift = stackFrame.args[1].jsValue();
3239
3240 CallFrame* callFrame = stackFrame.callFrame;
3241 JSValue result = jsNumber((val.toUInt32(callFrame)) >> (shift.toUInt32(callFrame) & 0x1f));
3242 CHECK_FOR_EXCEPTION_AT_END();
3243 return JSValue::encode(result);
3244 }
3245
3246 DEFINE_STUB_FUNCTION(EncodedJSValue, op_bitxor)
3247 {
3248 STUB_INIT_STACK_FRAME(stackFrame);
3249
3250 JSValue src1 = stackFrame.args[0].jsValue();
3251 JSValue src2 = stackFrame.args[1].jsValue();
3252
3253 CallFrame* callFrame = stackFrame.callFrame;
3254
3255 JSValue result = jsNumber(src1.toInt32(callFrame) ^ src2.toInt32(callFrame));
3256 CHECK_FOR_EXCEPTION_AT_END();
3257 return JSValue::encode(result);
3258 }
3259
3260 DEFINE_STUB_FUNCTION(JSObject*, op_new_regexp)
3261 {
3262 STUB_INIT_STACK_FRAME(stackFrame);
3263
3264 CallFrame* callFrame = stackFrame.callFrame;
3265
3266 RegExp* regExp = stackFrame.args[0].regExp();
3267 if (!regExp->isValid()) {
3268 stackFrame.vm->exception = createSyntaxError(callFrame, "Invalid flags supplied to RegExp constructor.");
3269 VM_THROW_EXCEPTION();
3270 }
3271
3272 return RegExpObject::create(*stackFrame.vm, stackFrame.callFrame->lexicalGlobalObject(), stackFrame.callFrame->lexicalGlobalObject()->regExpStructure(), regExp);
3273 }
3274
3275 DEFINE_STUB_FUNCTION(EncodedJSValue, op_bitor)
3276 {
3277 STUB_INIT_STACK_FRAME(stackFrame);
3278
3279 JSValue src1 = stackFrame.args[0].jsValue();
3280 JSValue src2 = stackFrame.args[1].jsValue();
3281
3282 CallFrame* callFrame = stackFrame.callFrame;
3283
3284 JSValue result = jsNumber(src1.toInt32(callFrame) | src2.toInt32(callFrame));
3285 CHECK_FOR_EXCEPTION_AT_END();
3286 return JSValue::encode(result);
3287 }
3288
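// Called for op_call_eval. The caller's scope is installed on the callee frame and, if the callee
// really is the global eval function, the eval is performed right here; otherwise an empty JSValue
// is returned so the JIT falls back to performing an ordinary call.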
3289 DEFINE_STUB_FUNCTION(EncodedJSValue, op_call_eval)
3290 {
3291 STUB_INIT_STACK_FRAME(stackFrame);
3292
3293 CallFrame* callFrame = stackFrame.callFrame;
3294 CallFrame* callerFrame = callFrame->callerFrame();
3295 ASSERT(callFrame->callerFrame()->codeBlock()->codeType() != FunctionCode
3296 || !callFrame->callerFrame()->codeBlock()->needsFullScopeChain()
3297 || callFrame->callerFrame()->uncheckedR(callFrame->callerFrame()->codeBlock()->activationRegister()).jsValue());
3298
3299 callFrame->setScope(callerFrame->scope());
3300 callFrame->setReturnPC(static_cast<Instruction*>((STUB_RETURN_ADDRESS).value()));
3301 callFrame->setCodeBlock(0);
3302
3303 if (!isHostFunction(callFrame->calleeAsValue(), globalFuncEval))
3304 return JSValue::encode(JSValue());
3305
3306 JSValue result = eval(callFrame);
3307 if (stackFrame.vm->exception)
3308 return throwExceptionFromOpCall<EncodedJSValue>(stackFrame, callFrame, STUB_RETURN_ADDRESS);
3309
3310 return JSValue::encode(result);
3311 }
3312
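// jitThrow() unwinds to the nearest handler: the stub's return address is redirected to the
// handler's catch routine and the handler's call frame is returned.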
3313 DEFINE_STUB_FUNCTION(void*, op_throw)
3314 {
3315 STUB_INIT_STACK_FRAME(stackFrame);
3316 ExceptionHandler handler = jitThrow(stackFrame.vm, stackFrame.callFrame, stackFrame.args[0].jsValue(), STUB_RETURN_ADDRESS);
3317 STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
3318 return handler.callFrame;
3319 }
3320
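// Builds (or reuses) the JSPropertyNameIterator for a for-in loop; the cached iterator is only
// reused while the object's structure and prototype chain are unchanged.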
3321 DEFINE_STUB_FUNCTION(JSPropertyNameIterator*, op_get_pnames)
3322 {
3323 STUB_INIT_STACK_FRAME(stackFrame);
3324
3325 CallFrame* callFrame = stackFrame.callFrame;
3326 JSObject* o = stackFrame.args[0].jsObject();
3327 Structure* structure = o->structure();
3328 JSPropertyNameIterator* jsPropertyNameIterator = structure->enumerationCache();
3329 if (!jsPropertyNameIterator || jsPropertyNameIterator->cachedPrototypeChain() != structure->prototypeChain(callFrame))
3330 jsPropertyNameIterator = JSPropertyNameIterator::create(callFrame, o);
3331 return jsPropertyNameIterator;
3332 }
3333
3334 DEFINE_STUB_FUNCTION(int, has_property)
3335 {
3336 STUB_INIT_STACK_FRAME(stackFrame);
3337
3338 JSObject* base = stackFrame.args[0].jsObject();
3339 JSString* property = stackFrame.args[1].jsString();
3340 int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
3341 CHECK_FOR_EXCEPTION_AT_END();
3342 return result;
3343 }
3344
3345 DEFINE_STUB_FUNCTION(void, op_push_with_scope)
3346 {
3347 STUB_INIT_STACK_FRAME(stackFrame);
3348
3349 JSObject* o = stackFrame.args[0].jsValue().toObject(stackFrame.callFrame);
3350 CHECK_FOR_EXCEPTION_VOID();
3351 stackFrame.callFrame->setScope(JSWithScope::create(stackFrame.callFrame, o));
3352 }
3353
3354 DEFINE_STUB_FUNCTION(void, op_pop_scope)
3355 {
3356 STUB_INIT_STACK_FRAME(stackFrame);
3357
3358 stackFrame.callFrame->setScope(stackFrame.callFrame->scope()->next());
3359 }
3360
3361 DEFINE_STUB_FUNCTION(EncodedJSValue, op_typeof)
3362 {
3363 STUB_INIT_STACK_FRAME(stackFrame);
3364
3365 return JSValue::encode(jsTypeStringForValue(stackFrame.callFrame, stackFrame.args[0].jsValue()));
3366 }
3367
3368 DEFINE_STUB_FUNCTION(EncodedJSValue, op_is_object)
3369 {
3370 STUB_INIT_STACK_FRAME(stackFrame);
3371
3372 return JSValue::encode(jsBoolean(jsIsObjectType(stackFrame.callFrame, stackFrame.args[0].jsValue())));
3373 }
3374
3375 DEFINE_STUB_FUNCTION(EncodedJSValue, op_is_function)
3376 {
3377 STUB_INIT_STACK_FRAME(stackFrame);
3378
3379 return JSValue::encode(jsBoolean(jsIsFunctionType(stackFrame.args[0].jsValue())));
3380 }
3381
3382 DEFINE_STUB_FUNCTION(EncodedJSValue, op_stricteq)
3383 {
3384 STUB_INIT_STACK_FRAME(stackFrame);
3385
3386 JSValue src1 = stackFrame.args[0].jsValue();
3387 JSValue src2 = stackFrame.args[1].jsValue();
3388
3389 bool result = JSValue::strictEqual(stackFrame.callFrame, src1, src2);
3390 CHECK_FOR_EXCEPTION_AT_END();
3391 return JSValue::encode(jsBoolean(result));
3392 }
3393
3394 DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_primitive)
3395 {
3396 STUB_INIT_STACK_FRAME(stackFrame);
3397
3398 return JSValue::encode(stackFrame.args[0].jsValue().toPrimitive(stackFrame.callFrame));
3399 }
3400
3401 DEFINE_STUB_FUNCTION(EncodedJSValue, op_strcat)
3402 {
3403 STUB_INIT_STACK_FRAME(stackFrame);
3404
3405 JSValue result = jsString(stackFrame.callFrame, &stackFrame.callFrame->registers()[stackFrame.args[0].int32()], stackFrame.args[1].int32());
3406 CHECK_FOR_EXCEPTION_AT_END();
3407 return JSValue::encode(result);
3408 }
3409
3410 DEFINE_STUB_FUNCTION(EncodedJSValue, op_nstricteq)
3411 {
3412 STUB_INIT_STACK_FRAME(stackFrame);
3413
3414 JSValue src1 = stackFrame.args[0].jsValue();
3415 JSValue src2 = stackFrame.args[1].jsValue();
3416
3417 bool result = !JSValue::strictEqual(stackFrame.callFrame, src1, src2);
3418 CHECK_FOR_EXCEPTION_AT_END();
3419 return JSValue::encode(jsBoolean(result));
3420 }
3421
3422 DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_number)
3423 {
3424 STUB_INIT_STACK_FRAME(stackFrame);
3425
3426 JSValue src = stackFrame.args[0].jsValue();
3427 CallFrame* callFrame = stackFrame.callFrame;
3428
3429 double number = src.toNumber(callFrame);
3430 CHECK_FOR_EXCEPTION_AT_END();
3431 return JSValue::encode(jsNumber(number));
3432 }
3433
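// The 'in' operator: requires an object on the right-hand side, then tries an indexed lookup,
// a private-name lookup, and finally an identifier lookup.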
3434 DEFINE_STUB_FUNCTION(EncodedJSValue, op_in)
3435 {
3436 STUB_INIT_STACK_FRAME(stackFrame);
3437
3438 CallFrame* callFrame = stackFrame.callFrame;
3439 JSValue baseVal = stackFrame.args[1].jsValue();
3440
3441 if (!baseVal.isObject()) {
3442 stackFrame.vm->exception = createInvalidParamError(stackFrame.callFrame, "in", baseVal);
3443 VM_THROW_EXCEPTION();
3444 }
3445
3446 JSValue propName = stackFrame.args[0].jsValue();
3447 JSObject* baseObj = asObject(baseVal);
3448
3449 uint32_t i;
3450 if (propName.getUInt32(i))
3451 return JSValue::encode(jsBoolean(baseObj->hasProperty(callFrame, i)));
3452
3453 if (isName(propName))
3454 return JSValue::encode(jsBoolean(baseObj->hasProperty(callFrame, jsCast<NameInstance*>(propName.asCell())->privateName())));
3455
3456 Identifier property(callFrame, propName.toString(callFrame)->value(callFrame));
3457 CHECK_FOR_EXCEPTION();
3458 return JSValue::encode(jsBoolean(baseObj->hasProperty(callFrame, property)));
3459 }
3460
3461 DEFINE_STUB_FUNCTION(void, op_push_name_scope)
3462 {
3463 STUB_INIT_STACK_FRAME(stackFrame);
3464
3465 JSNameScope* scope = JSNameScope::create(stackFrame.callFrame, stackFrame.args[0].identifier(), stackFrame.args[1].jsValue(), stackFrame.args[2].int32());
3466
3467 CallFrame* callFrame = stackFrame.callFrame;
3468 callFrame->setScope(scope);
3469 }
3470
3471 DEFINE_STUB_FUNCTION(void, op_put_by_index)
3472 {
3473 STUB_INIT_STACK_FRAME(stackFrame);
3474
3475 CallFrame* callFrame = stackFrame.callFrame;
3476 unsigned property = stackFrame.args[1].int32();
3477
3478 JSValue arrayValue = stackFrame.args[0].jsValue();
3479 ASSERT(isJSArray(arrayValue));
3480 asArray(arrayValue)->putDirectIndex(callFrame, property, stackFrame.args[2].jsValue());
3481 }
3482
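// The switch stubs below map the scrutinee to the machine-code address of the matching case via
// the CodeBlock's jump tables, falling back to ctiDefault when there is no match (or, for
// op_switch_imm, when the scrutinee is not an int32-representable number).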
3483 DEFINE_STUB_FUNCTION(void*, op_switch_imm)
3484 {
3485 STUB_INIT_STACK_FRAME(stackFrame);
3486
3487 JSValue scrutinee = stackFrame.args[0].jsValue();
3488 unsigned tableIndex = stackFrame.args[1].int32();
3489 CallFrame* callFrame = stackFrame.callFrame;
3490 CodeBlock* codeBlock = callFrame->codeBlock();
3491
3492 if (scrutinee.isInt32())
3493 return codeBlock->immediateSwitchJumpTable(tableIndex).ctiForValue(scrutinee.asInt32()).executableAddress();
3494 if (scrutinee.isDouble() && scrutinee.asDouble() == static_cast<int32_t>(scrutinee.asDouble()))
3495 return codeBlock->immediateSwitchJumpTable(tableIndex).ctiForValue(static_cast<int32_t>(scrutinee.asDouble())).executableAddress();
3496 return codeBlock->immediateSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
3497 }
3498
3499 DEFINE_STUB_FUNCTION(void*, op_switch_char)
3500 {
3501 STUB_INIT_STACK_FRAME(stackFrame);
3502
3503 JSValue scrutinee = stackFrame.args[0].jsValue();
3504 unsigned tableIndex = stackFrame.args[1].int32();
3505 CallFrame* callFrame = stackFrame.callFrame;
3506 CodeBlock* codeBlock = callFrame->codeBlock();
3507
3508 void* result = codeBlock->characterSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
3509
3510 if (scrutinee.isString()) {
3511 StringImpl* value = asString(scrutinee)->value(callFrame).impl();
3512 if (value->length() == 1)
3513 result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue((*value)[0]).executableAddress();
3514 }
3515
3516 CHECK_FOR_EXCEPTION_AT_END();
3517 return result;
3518 }
3519
3520 DEFINE_STUB_FUNCTION(void*, op_switch_string)
3521 {
3522 STUB_INIT_STACK_FRAME(stackFrame);
3523
3524 JSValue scrutinee = stackFrame.args[0].jsValue();
3525 unsigned tableIndex = stackFrame.args[1].int32();
3526 CallFrame* callFrame = stackFrame.callFrame;
3527 CodeBlock* codeBlock = callFrame->codeBlock();
3528
3529 void* result = codeBlock->stringSwitchJumpTable(tableIndex).ctiDefault.executableAddress();
3530
3531 if (scrutinee.isString()) {
3532 StringImpl* value = asString(scrutinee)->value(callFrame).impl();
3533 result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
3534 }
3535
3536 CHECK_FOR_EXCEPTION_AT_END();
3537 return result;
3538 }
3539
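// delete base[subscript]: picks the indexed, private-name, or identifier flavour of
// deleteProperty(), and raises a TypeError in strict mode when the deletion is refused.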
3540 DEFINE_STUB_FUNCTION(EncodedJSValue, op_del_by_val)
3541 {
3542 STUB_INIT_STACK_FRAME(stackFrame);
3543
3544 CallFrame* callFrame = stackFrame.callFrame;
3545
3546 JSValue baseValue = stackFrame.args[0].jsValue();
3547 JSObject* baseObj = baseValue.toObject(callFrame); // may throw
3548
3549 JSValue subscript = stackFrame.args[1].jsValue();
3550 bool result;
3551 uint32_t i;
3552 if (subscript.getUInt32(i))
3553 result = baseObj->methodTable()->deletePropertyByIndex(baseObj, callFrame, i);
3554 else if (isName(subscript))
3555 result = baseObj->methodTable()->deleteProperty(baseObj, callFrame, jsCast<NameInstance*>(subscript.asCell())->privateName());
3556 else {
3557 CHECK_FOR_EXCEPTION();
3558 Identifier property(callFrame, subscript.toString(callFrame)->value(callFrame));
3559 CHECK_FOR_EXCEPTION();
3560 result = baseObj->methodTable()->deleteProperty(baseObj, callFrame, property);
3561 }
3562
3563 if (!result && callFrame->codeBlock()->isStrictMode())
3564 stackFrame.vm->exception = createTypeError(stackFrame.callFrame, "Unable to delete property.");
3565
3566 CHECK_FOR_EXCEPTION_AT_END();
3567 return JSValue::encode(jsBoolean(result));
3568 }
3569
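// Installs an accessor property: the getter and/or setter objects are wrapped in a GetterSetter
// cell and stored on the base object with putDirectAccessor().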
3570 DEFINE_STUB_FUNCTION(void, op_put_getter_setter)
3571 {
3572 STUB_INIT_STACK_FRAME(stackFrame);
3573
3574 CallFrame* callFrame = stackFrame.callFrame;
3575
3576 ASSERT(stackFrame.args[0].jsValue().isObject());
3577 JSObject* baseObj = asObject(stackFrame.args[0].jsValue());
3578
3579 GetterSetter* accessor = GetterSetter::create(callFrame);
3580
3581 JSValue getter = stackFrame.args[2].jsValue();
3582 JSValue setter = stackFrame.args[3].jsValue();
3583 ASSERT(getter.isObject() || getter.isUndefined());
3584 ASSERT(setter.isObject() || setter.isUndefined());
3585 ASSERT(getter.isObject() || setter.isObject());
3586
3587 if (!getter.isUndefined())
3588 accessor->setGetter(callFrame->vm(), asObject(getter));
3589 if (!setter.isUndefined())
3590 accessor->setSetter(callFrame->vm(), asObject(setter));
3591 baseObj->putDirectAccessor(callFrame, stackFrame.args[1].identifier(), accessor, Accessor);
3592 }
3593
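// args[1] selects the error kind: non-zero produces a ReferenceError, zero a TypeError.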
3594 DEFINE_STUB_FUNCTION(void, op_throw_static_error)
3595 {
3596 STUB_INIT_STACK_FRAME(stackFrame);
3597
3598 CallFrame* callFrame = stackFrame.callFrame;
3599 String message = stackFrame.args[0].jsValue().toString(callFrame)->value(callFrame);
3600 if (stackFrame.args[1].asInt32)
3601 stackFrame.vm->exception = createReferenceError(callFrame, message);
3602 else
3603 stackFrame.vm->exception = createTypeError(callFrame, message);
3604 VM_THROW_EXCEPTION_AT_END();
3605 }
3606
3607 DEFINE_STUB_FUNCTION(void, op_debug)
3608 {
3609 STUB_INIT_STACK_FRAME(stackFrame);
3610
3611 CallFrame* callFrame = stackFrame.callFrame;
3612
3613 int debugHookID = stackFrame.args[0].int32();
3614 int firstLine = stackFrame.args[1].int32();
3615 int lastLine = stackFrame.args[2].int32();
3616 int column = stackFrame.args[3].int32();
3617
3618 stackFrame.vm->interpreter->debug(callFrame, static_cast<DebugHookID>(debugHookID), firstLine, lastLine, column);
3619 }
3620
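// Re-throws the exception already recorded on the VM (vm->exception / vm->exceptionLocation);
// reached via the VM-throw trampoline after a stub reported an exception at end.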
3621 DEFINE_STUB_FUNCTION(void*, vm_throw)
3622 {
3623 STUB_INIT_STACK_FRAME(stackFrame);
3624 VM* vm = stackFrame.vm;
3625 ExceptionHandler handler = jitThrow(vm, stackFrame.callFrame, vm->exception, vm->exceptionLocation);
3626 STUB_SET_RETURN_ADDRESS(handler.catchRoutine);
3627 return handler.callFrame;
3628 }
3629
3630 DEFINE_STUB_FUNCTION(EncodedJSValue, to_object)
3631 {
3632 STUB_INIT_STACK_FRAME(stackFrame);
3633
3634 CallFrame* callFrame = stackFrame.callFrame;
3635 return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
3636 }
3637
3638 } // namespace JSC
3639
3640 #endif // ENABLE(JIT)