]>
Commit | Line | Data |
---|---|---|
6fe7ccc8 A |
1 | # Copyright (C) 2011, 2012 Apple Inc. All rights reserved. |
2 | # | |
3 | # Redistribution and use in source and binary forms, with or without | |
4 | # modification, are permitted provided that the following conditions | |
5 | # are met: | |
6 | # 1. Redistributions of source code must retain the above copyright | |
7 | # notice, this list of conditions and the following disclaimer. | |
8 | # 2. Redistributions in binary form must reproduce the above copyright | |
9 | # notice, this list of conditions and the following disclaimer in the | |
10 | # documentation and/or other materials provided with the distribution. | |
11 | # | |
12 | # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' | |
13 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, | |
14 | # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
15 | # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS | |
16 | # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
17 | # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
18 | # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | |
19 | # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | |
20 | # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
21 | # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF | |
22 | # THE POSSIBILITY OF SUCH DAMAGE. | |
23 | ||
24 | ||
25 | # Some value representation constants. | |
26 | const TagBitTypeOther = 0x2 | |
27 | const TagBitBool = 0x4 | |
28 | const TagBitUndefined = 0x8 | |
29 | const ValueEmpty = 0x0 | |
30 | const ValueFalse = TagBitTypeOther | TagBitBool | |
31 | const ValueTrue = TagBitTypeOther | TagBitBool | 1 | |
32 | const ValueUndefined = TagBitTypeOther | TagBitUndefined | |
33 | const ValueNull = TagBitTypeOther | |
34 | ||
35 | # Utilities. | |
93a37866 A |
36 | macro jumpToInstruction() |
37 | jmp [PB, PC, 8] | |
38 | end | |
39 | ||
6fe7ccc8 A |
40 | macro dispatch(advance) |
41 | addp advance, PC | |
93a37866 | 42 | jumpToInstruction() |
6fe7ccc8 A |
43 | end |
44 | ||
45 | macro dispatchInt(advance) | |
46 | addi advance, PC | |
93a37866 A |
47 | jumpToInstruction() |
48 | end | |
49 | ||
50 | macro dispatchIntIndirect(offset) | |
51 | dispatchInt(offset * 8[PB, PC, 8]) | |
6fe7ccc8 A |
52 | end |
53 | ||
54 | macro dispatchAfterCall() | |
55 | loadi ArgumentCount + TagOffset[cfr], PC | |
56 | loadp CodeBlock[cfr], PB | |
57 | loadp CodeBlock::m_instructions[PB], PB | |
93a37866 | 58 | jumpToInstruction() |
6fe7ccc8 A |
59 | end |
60 | ||
61 | macro cCall2(function, arg1, arg2) | |
93a37866 A |
62 | if X86_64 |
63 | move arg1, t5 | |
64 | move arg2, t4 | |
65 | call function | |
66 | elsif ARM64 | |
67 | move arg1, t0 | |
68 | move arg2, t1 | |
69 | call function | |
70 | elsif C_LOOP | |
71 | cloopCallSlowPath function, arg1, arg2 | |
72 | else | |
73 | error | |
74 | end | |
6fe7ccc8 A |
75 | end |
76 | ||
77 | # This barely works. arg3 and arg4 should probably be immediates. | |
78 | macro cCall4(function, arg1, arg2, arg3, arg4) | |
93a37866 A |
79 | if X86_64 |
80 | move arg1, t5 | |
81 | move arg2, t4 | |
82 | move arg3, t1 | |
83 | move arg4, t2 | |
84 | call function | |
85 | elsif ARM64 | |
86 | move arg1, t0 | |
87 | move arg2, t1 | |
88 | move arg3, t2 | |
89 | move arg4, t3 | |
90 | call function | |
91 | elsif C_LOOP | |
92 | error | |
93 | else | |
94 | error | |
95 | end | |
6fe7ccc8 A |
96 | end |
97 | ||
98 | macro prepareStateForCCall() | |
99 | leap [PB, PC, 8], PC | |
100 | move PB, t3 | |
93a37866 A |
101 | if X86_64 |
102 | resetX87Stack | |
103 | end | |
6fe7ccc8 A |
104 | end |
105 | ||
106 | macro restoreStateAfterCCall() | |
107 | move t0, PC | |
108 | move t1, cfr | |
109 | move t3, PB | |
110 | subp PB, PC | |
93a37866 | 111 | rshiftp 3, PC |
6fe7ccc8 A |
112 | end |
113 | ||
114 | macro callSlowPath(slowPath) | |
115 | prepareStateForCCall() | |
116 | cCall2(slowPath, cfr, PC) | |
117 | restoreStateAfterCCall() | |
118 | end | |
119 | ||
120 | macro traceOperand(fromWhere, operand) | |
121 | prepareStateForCCall() | |
122 | cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand) | |
123 | restoreStateAfterCCall() | |
124 | end | |
125 | ||
126 | macro traceValue(fromWhere, operand) | |
127 | prepareStateForCCall() | |
128 | cCall4(_llint_trace_value, cfr, PC, fromWhere, operand) | |
129 | restoreStateAfterCCall() | |
130 | end | |
131 | ||
132 | # Call a slow path for call call opcodes. | |
133 | macro callCallSlowPath(advance, slowPath, action) | |
134 | addi advance, PC, t0 | |
135 | storei t0, ArgumentCount + TagOffset[cfr] | |
136 | prepareStateForCCall() | |
137 | cCall2(slowPath, cfr, PC) | |
138 | move t1, cfr | |
139 | action(t0) | |
140 | end | |
141 | ||
93a37866 A |
142 | macro callWatchdogTimerHandler(throwHandler) |
143 | storei PC, ArgumentCount + TagOffset[cfr] | |
144 | prepareStateForCCall() | |
145 | cCall2(_llint_slow_path_handle_watchdog_timer, cfr, PC) | |
146 | move t1, cfr | |
147 | btpnz t0, throwHandler | |
148 | move t3, PB | |
149 | loadi ArgumentCount + TagOffset[cfr], PC | |
150 | end | |
151 | ||
6fe7ccc8 A |
152 | macro checkSwitchToJITForLoop() |
153 | checkSwitchToJIT( | |
154 | 1, | |
155 | macro() | |
156 | storei PC, ArgumentCount + TagOffset[cfr] | |
157 | prepareStateForCCall() | |
158 | cCall2(_llint_loop_osr, cfr, PC) | |
159 | move t1, cfr | |
160 | btpz t0, .recover | |
161 | jmp t0 | |
162 | .recover: | |
163 | move t3, PB | |
164 | loadi ArgumentCount + TagOffset[cfr], PC | |
165 | end) | |
166 | end | |
167 | ||
168 | # Index and value must be different registers. Index may be clobbered. | |
169 | macro loadConstantOrVariable(index, value) | |
170 | bpgteq index, FirstConstantRegisterIndex, .constant | |
93a37866 | 171 | loadq [cfr, index, 8], value |
6fe7ccc8 A |
172 | jmp .done |
173 | .constant: | |
174 | loadp CodeBlock[cfr], value | |
175 | loadp CodeBlock::m_constantRegisters + VectorBufferOffset[value], value | |
176 | subp FirstConstantRegisterIndex, index | |
93a37866 | 177 | loadq [value, index, 8], value |
6fe7ccc8 A |
178 | .done: |
179 | end | |
180 | ||
181 | macro loadConstantOrVariableInt32(index, value, slow) | |
182 | loadConstantOrVariable(index, value) | |
93a37866 | 183 | bqb value, tagTypeNumber, slow |
6fe7ccc8 A |
184 | end |
185 | ||
186 | macro loadConstantOrVariableCell(index, value, slow) | |
187 | loadConstantOrVariable(index, value) | |
93a37866 | 188 | btqnz value, tagMask, slow |
6fe7ccc8 A |
189 | end |
190 | ||
191 | macro writeBarrier(value) | |
192 | # Nothing to do, since we don't have a generational or incremental collector. | |
193 | end | |
194 | ||
195 | macro valueProfile(value, profile) | |
196 | if VALUE_PROFILER | |
93a37866 | 197 | storeq value, ValueProfile::m_buckets[profile] |
6fe7ccc8 A |
198 | end |
199 | end | |
200 | ||
201 | ||
202 | # Entrypoints into the interpreter. | |
203 | ||
204 | # Expects that CodeBlock is in t1, which is what prologue() leaves behind. | |
205 | macro functionArityCheck(doneLabel, slow_path) | |
206 | loadi PayloadOffset + ArgumentCount[cfr], t0 | |
207 | biaeq t0, CodeBlock::m_numParameters[t1], doneLabel | |
208 | prepareStateForCCall() | |
209 | cCall2(slow_path, cfr, PC) # This slow_path has a simple protocol: t0 = 0 => no error, t0 != 0 => error | |
210 | move t1, cfr | |
211 | btiz t0, .continue | |
93a37866 A |
212 | loadp JITStackFrame::vm[sp], t1 |
213 | loadp VM::callFrameForThrow[t1], t0 | |
214 | jmp VM::targetMachinePCForThrow[t1] | |
6fe7ccc8 A |
215 | .continue: |
216 | # Reload CodeBlock and reset PC, since the slow_path clobbered them. | |
217 | loadp CodeBlock[cfr], t1 | |
218 | loadp CodeBlock::m_instructions[t1], PB | |
219 | move 0, PC | |
220 | jmp doneLabel | |
221 | end | |
222 | ||
223 | ||
224 | # Instruction implementations | |
225 | ||
226 | _llint_op_enter: | |
227 | traceExecution() | |
93a37866 A |
228 | loadp CodeBlock[cfr], t2 // t2<CodeBlock> = cfr.CodeBlock |
229 | loadi CodeBlock::m_numVars[t2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars | |
6fe7ccc8 A |
230 | btiz t2, .opEnterDone |
231 | move ValueUndefined, t0 | |
232 | .opEnterLoop: | |
233 | subi 1, t2 | |
93a37866 | 234 | storeq t0, [cfr, t2, 8] |
6fe7ccc8 A |
235 | btinz t2, .opEnterLoop |
236 | .opEnterDone: | |
237 | dispatch(1) | |
238 | ||
239 | ||
240 | _llint_op_create_activation: | |
241 | traceExecution() | |
93a37866 A |
242 | loadisFromInstruction(1, t0) |
243 | bqneq [cfr, t0, 8], ValueEmpty, .opCreateActivationDone | |
6fe7ccc8 A |
244 | callSlowPath(_llint_slow_path_create_activation) |
245 | .opCreateActivationDone: | |
246 | dispatch(2) | |
247 | ||
248 | ||
249 | _llint_op_init_lazy_reg: | |
250 | traceExecution() | |
93a37866 A |
251 | loadisFromInstruction(1, t0) |
252 | storeq ValueEmpty, [cfr, t0, 8] | |
6fe7ccc8 A |
253 | dispatch(2) |
254 | ||
255 | ||
256 | _llint_op_create_arguments: | |
257 | traceExecution() | |
93a37866 A |
258 | loadisFromInstruction(1, t0) |
259 | bqneq [cfr, t0, 8], ValueEmpty, .opCreateArgumentsDone | |
6fe7ccc8 A |
260 | callSlowPath(_llint_slow_path_create_arguments) |
261 | .opCreateArgumentsDone: | |
262 | dispatch(2) | |
263 | ||
264 | ||
265 | _llint_op_create_this: | |
266 | traceExecution() | |
93a37866 | 267 | loadisFromInstruction(2, t0) |
6fe7ccc8 | 268 | loadp [cfr, t0, 8], t0 |
93a37866 A |
269 | loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_allocator[t0], t1 |
270 | loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_structure[t0], t2 | |
271 | btpz t1, .opCreateThisSlow | |
272 | allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow) | |
273 | loadisFromInstruction(1, t1) | |
274 | storeq t0, [cfr, t1, 8] | |
275 | dispatch(4) | |
6fe7ccc8 A |
276 | |
277 | .opCreateThisSlow: | |
278 | callSlowPath(_llint_slow_path_create_this) | |
93a37866 | 279 | dispatch(4) |
6fe7ccc8 A |
280 | |
281 | ||
282 | _llint_op_get_callee: | |
283 | traceExecution() | |
93a37866 A |
284 | loadisFromInstruction(1, t0) |
285 | loadpFromInstruction(2, t2) | |
6fe7ccc8 | 286 | loadp Callee[cfr], t1 |
93a37866 | 287 | valueProfile(t1, t2) |
6fe7ccc8 | 288 | storep t1, [cfr, t0, 8] |
93a37866 | 289 | dispatch(3) |
6fe7ccc8 A |
290 | |
291 | ||
292 | _llint_op_convert_this: | |
293 | traceExecution() | |
93a37866 A |
294 | loadisFromInstruction(1, t0) |
295 | loadq [cfr, t0, 8], t0 | |
296 | btqnz t0, tagMask, .opConvertThisSlow | |
6fe7ccc8 A |
297 | loadp JSCell::m_structure[t0], t0 |
298 | bbb Structure::m_typeInfo + TypeInfo::m_type[t0], ObjectType, .opConvertThisSlow | |
93a37866 A |
299 | loadpFromInstruction(2, t1) |
300 | valueProfile(t0, t1) | |
301 | dispatch(3) | |
6fe7ccc8 A |
302 | |
303 | .opConvertThisSlow: | |
304 | callSlowPath(_llint_slow_path_convert_this) | |
93a37866 | 305 | dispatch(3) |
6fe7ccc8 A |
306 | |
307 | ||
308 | _llint_op_new_object: | |
309 | traceExecution() | |
93a37866 A |
310 | loadpFromInstruction(3, t0) |
311 | loadp ObjectAllocationProfile::m_allocator[t0], t1 | |
312 | loadp ObjectAllocationProfile::m_structure[t0], t2 | |
313 | allocateJSObject(t1, t2, t0, t3, .opNewObjectSlow) | |
314 | loadisFromInstruction(1, t1) | |
315 | storeq t0, [cfr, t1, 8] | |
316 | dispatch(4) | |
6fe7ccc8 A |
317 | |
318 | .opNewObjectSlow: | |
319 | callSlowPath(_llint_slow_path_new_object) | |
93a37866 | 320 | dispatch(4) |
6fe7ccc8 A |
321 | |
322 | ||
323 | _llint_op_mov: | |
324 | traceExecution() | |
93a37866 A |
325 | loadisFromInstruction(2, t1) |
326 | loadisFromInstruction(1, t0) | |
6fe7ccc8 | 327 | loadConstantOrVariable(t1, t2) |
93a37866 | 328 | storeq t2, [cfr, t0, 8] |
6fe7ccc8 A |
329 | dispatch(3) |
330 | ||
331 | ||
332 | _llint_op_not: | |
333 | traceExecution() | |
93a37866 A |
334 | loadisFromInstruction(2, t0) |
335 | loadisFromInstruction(1, t1) | |
6fe7ccc8 | 336 | loadConstantOrVariable(t0, t2) |
93a37866 A |
337 | xorq ValueFalse, t2 |
338 | btqnz t2, ~1, .opNotSlow | |
339 | xorq ValueTrue, t2 | |
340 | storeq t2, [cfr, t1, 8] | |
6fe7ccc8 A |
341 | dispatch(3) |
342 | ||
343 | .opNotSlow: | |
344 | callSlowPath(_llint_slow_path_not) | |
345 | dispatch(3) | |
346 | ||
347 | ||
348 | macro equalityComparison(integerComparison, slowPath) | |
349 | traceExecution() | |
93a37866 A |
350 | loadisFromInstruction(3, t0) |
351 | loadisFromInstruction(2, t2) | |
352 | loadisFromInstruction(1, t3) | |
6fe7ccc8 A |
353 | loadConstantOrVariableInt32(t0, t1, .slow) |
354 | loadConstantOrVariableInt32(t2, t0, .slow) | |
355 | integerComparison(t0, t1, t0) | |
93a37866 A |
356 | orq ValueFalse, t0 |
357 | storeq t0, [cfr, t3, 8] | |
6fe7ccc8 A |
358 | dispatch(4) |
359 | ||
360 | .slow: | |
361 | callSlowPath(slowPath) | |
362 | dispatch(4) | |
363 | end | |
364 | ||
365 | _llint_op_eq: | |
366 | equalityComparison( | |
367 | macro (left, right, result) cieq left, right, result end, | |
368 | _llint_slow_path_eq) | |
369 | ||
370 | ||
371 | _llint_op_neq: | |
372 | equalityComparison( | |
373 | macro (left, right, result) cineq left, right, result end, | |
374 | _llint_slow_path_neq) | |
375 | ||
376 | ||
377 | macro equalNullComparison() | |
93a37866 A |
378 | loadisFromInstruction(2, t0) |
379 | loadq [cfr, t0, 8], t0 | |
380 | btqnz t0, tagMask, .immediate | |
6fe7ccc8 | 381 | loadp JSCell::m_structure[t0], t2 |
93a37866 A |
382 | btbnz Structure::m_typeInfo + TypeInfo::m_flags[t2], MasqueradesAsUndefined, .masqueradesAsUndefined |
383 | move 0, t0 | |
384 | jmp .done | |
385 | .masqueradesAsUndefined: | |
386 | loadp CodeBlock[cfr], t0 | |
387 | loadp CodeBlock::m_globalObject[t0], t0 | |
388 | cpeq Structure::m_globalObject[t2], t0, t0 | |
6fe7ccc8 A |
389 | jmp .done |
390 | .immediate: | |
93a37866 A |
391 | andq ~TagBitUndefined, t0 |
392 | cqeq t0, ValueNull, t0 | |
6fe7ccc8 A |
393 | .done: |
394 | end | |
395 | ||
396 | _llint_op_eq_null: | |
397 | traceExecution() | |
398 | equalNullComparison() | |
93a37866 A |
399 | loadisFromInstruction(1, t1) |
400 | orq ValueFalse, t0 | |
401 | storeq t0, [cfr, t1, 8] | |
6fe7ccc8 A |
402 | dispatch(3) |
403 | ||
404 | ||
405 | _llint_op_neq_null: | |
406 | traceExecution() | |
407 | equalNullComparison() | |
93a37866 A |
408 | loadisFromInstruction(1, t1) |
409 | xorq ValueTrue, t0 | |
410 | storeq t0, [cfr, t1, 8] | |
6fe7ccc8 A |
411 | dispatch(3) |
412 | ||
413 | ||
414 | macro strictEq(equalityOperation, slowPath) | |
415 | traceExecution() | |
93a37866 A |
416 | loadisFromInstruction(3, t0) |
417 | loadisFromInstruction(2, t2) | |
6fe7ccc8 A |
418 | loadConstantOrVariable(t0, t1) |
419 | loadConstantOrVariable(t2, t0) | |
420 | move t0, t2 | |
93a37866 A |
421 | orq t1, t2 |
422 | btqz t2, tagMask, .slow | |
423 | bqaeq t0, tagTypeNumber, .leftOK | |
424 | btqnz t0, tagTypeNumber, .slow | |
6fe7ccc8 | 425 | .leftOK: |
93a37866 A |
426 | bqaeq t1, tagTypeNumber, .rightOK |
427 | btqnz t1, tagTypeNumber, .slow | |
6fe7ccc8 A |
428 | .rightOK: |
429 | equalityOperation(t0, t1, t0) | |
93a37866 A |
430 | loadisFromInstruction(1, t1) |
431 | orq ValueFalse, t0 | |
432 | storeq t0, [cfr, t1, 8] | |
6fe7ccc8 A |
433 | dispatch(4) |
434 | ||
435 | .slow: | |
436 | callSlowPath(slowPath) | |
437 | dispatch(4) | |
438 | end | |
439 | ||
440 | _llint_op_stricteq: | |
441 | strictEq( | |
93a37866 | 442 | macro (left, right, result) cqeq left, right, result end, |
6fe7ccc8 A |
443 | _llint_slow_path_stricteq) |
444 | ||
445 | ||
446 | _llint_op_nstricteq: | |
447 | strictEq( | |
93a37866 | 448 | macro (left, right, result) cqneq left, right, result end, |
6fe7ccc8 A |
449 | _llint_slow_path_nstricteq) |
450 | ||
451 | ||
452 | macro preOp(arithmeticOperation, slowPath) | |
453 | traceExecution() | |
93a37866 A |
454 | loadisFromInstruction(1, t0) |
455 | loadq [cfr, t0, 8], t1 | |
456 | bqb t1, tagTypeNumber, .slow | |
6fe7ccc8 | 457 | arithmeticOperation(t1, .slow) |
93a37866 A |
458 | orq tagTypeNumber, t1 |
459 | storeq t1, [cfr, t0, 8] | |
6fe7ccc8 A |
460 | dispatch(2) |
461 | ||
462 | .slow: | |
463 | callSlowPath(slowPath) | |
464 | dispatch(2) | |
465 | end | |
466 | ||
93a37866 | 467 | _llint_op_inc: |
6fe7ccc8 A |
468 | preOp( |
469 | macro (value, slow) baddio 1, value, slow end, | |
470 | _llint_slow_path_pre_inc) | |
471 | ||
472 | ||
93a37866 | 473 | _llint_op_dec: |
6fe7ccc8 A |
474 | preOp( |
475 | macro (value, slow) bsubio 1, value, slow end, | |
476 | _llint_slow_path_pre_dec) | |
477 | ||
478 | ||
93a37866 | 479 | _llint_op_to_number: |
6fe7ccc8 | 480 | traceExecution() |
93a37866 A |
481 | loadisFromInstruction(2, t0) |
482 | loadisFromInstruction(1, t1) | |
6fe7ccc8 | 483 | loadConstantOrVariable(t0, t2) |
93a37866 A |
484 | bqaeq t2, tagTypeNumber, .opToNumberIsImmediate |
485 | btqz t2, tagTypeNumber, .opToNumberSlow | |
486 | .opToNumberIsImmediate: | |
487 | storeq t2, [cfr, t1, 8] | |
6fe7ccc8 A |
488 | dispatch(3) |
489 | ||
93a37866 A |
490 | .opToNumberSlow: |
491 | callSlowPath(_llint_slow_path_to_number) | |
6fe7ccc8 A |
492 | dispatch(3) |
493 | ||
494 | ||
495 | _llint_op_negate: | |
496 | traceExecution() | |
93a37866 A |
497 | loadisFromInstruction(2, t0) |
498 | loadisFromInstruction(1, t1) | |
6fe7ccc8 | 499 | loadConstantOrVariable(t0, t2) |
93a37866 | 500 | bqb t2, tagTypeNumber, .opNegateNotInt |
6fe7ccc8 A |
501 | btiz t2, 0x7fffffff, .opNegateSlow |
502 | negi t2 | |
93a37866 A |
503 | orq tagTypeNumber, t2 |
504 | storeq t2, [cfr, t1, 8] | |
6fe7ccc8 A |
505 | dispatch(3) |
506 | .opNegateNotInt: | |
93a37866 A |
507 | btqz t2, tagTypeNumber, .opNegateSlow |
508 | xorq 0x8000000000000000, t2 | |
509 | storeq t2, [cfr, t1, 8] | |
6fe7ccc8 A |
510 | dispatch(3) |
511 | ||
512 | .opNegateSlow: | |
513 | callSlowPath(_llint_slow_path_negate) | |
514 | dispatch(3) | |
515 | ||
516 | ||
517 | macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath) | |
93a37866 A |
518 | loadisFromInstruction(3, t0) |
519 | loadisFromInstruction(2, t2) | |
6fe7ccc8 A |
520 | loadConstantOrVariable(t0, t1) |
521 | loadConstantOrVariable(t2, t0) | |
93a37866 A |
522 | bqb t0, tagTypeNumber, .op1NotInt |
523 | bqb t1, tagTypeNumber, .op2NotInt | |
524 | loadisFromInstruction(1, t2) | |
6fe7ccc8 A |
525 | integerOperationAndStore(t1, t0, .slow, t2) |
526 | dispatch(5) | |
527 | ||
528 | .op1NotInt: | |
529 | # First operand is definitely not an int, the second operand could be anything. | |
93a37866 A |
530 | btqz t0, tagTypeNumber, .slow |
531 | bqaeq t1, tagTypeNumber, .op1NotIntOp2Int | |
532 | btqz t1, tagTypeNumber, .slow | |
533 | addq tagTypeNumber, t1 | |
534 | fq2d t1, ft1 | |
6fe7ccc8 A |
535 | jmp .op1NotIntReady |
536 | .op1NotIntOp2Int: | |
537 | ci2d t1, ft1 | |
538 | .op1NotIntReady: | |
93a37866 A |
539 | loadisFromInstruction(1, t2) |
540 | addq tagTypeNumber, t0 | |
541 | fq2d t0, ft0 | |
6fe7ccc8 | 542 | doubleOperation(ft1, ft0) |
93a37866 A |
543 | fd2q ft0, t0 |
544 | subq tagTypeNumber, t0 | |
545 | storeq t0, [cfr, t2, 8] | |
6fe7ccc8 A |
546 | dispatch(5) |
547 | ||
548 | .op2NotInt: | |
549 | # First operand is definitely an int, the second is definitely not. | |
93a37866 A |
550 | loadisFromInstruction(1, t2) |
551 | btqz t1, tagTypeNumber, .slow | |
6fe7ccc8 | 552 | ci2d t0, ft0 |
93a37866 A |
553 | addq tagTypeNumber, t1 |
554 | fq2d t1, ft1 | |
6fe7ccc8 | 555 | doubleOperation(ft1, ft0) |
93a37866 A |
556 | fd2q ft0, t0 |
557 | subq tagTypeNumber, t0 | |
558 | storeq t0, [cfr, t2, 8] | |
6fe7ccc8 A |
559 | dispatch(5) |
560 | ||
561 | .slow: | |
562 | callSlowPath(slowPath) | |
563 | dispatch(5) | |
564 | end | |
565 | ||
566 | macro binaryOp(integerOperation, doubleOperation, slowPath) | |
567 | binaryOpCustomStore( | |
568 | macro (left, right, slow, index) | |
569 | integerOperation(left, right, slow) | |
93a37866 A |
570 | orq tagTypeNumber, right |
571 | storeq right, [cfr, index, 8] | |
6fe7ccc8 A |
572 | end, |
573 | doubleOperation, slowPath) | |
574 | end | |
575 | ||
576 | _llint_op_add: | |
577 | traceExecution() | |
578 | binaryOp( | |
579 | macro (left, right, slow) baddio left, right, slow end, | |
580 | macro (left, right) addd left, right end, | |
581 | _llint_slow_path_add) | |
582 | ||
583 | ||
584 | _llint_op_mul: | |
585 | traceExecution() | |
586 | binaryOpCustomStore( | |
587 | macro (left, right, slow, index) | |
588 | # Assume t3 is scratchable. | |
589 | move right, t3 | |
590 | bmulio left, t3, slow | |
591 | btinz t3, .done | |
592 | bilt left, 0, slow | |
593 | bilt right, 0, slow | |
594 | .done: | |
93a37866 A |
595 | orq tagTypeNumber, t3 |
596 | storeq t3, [cfr, index, 8] | |
6fe7ccc8 A |
597 | end, |
598 | macro (left, right) muld left, right end, | |
599 | _llint_slow_path_mul) | |
600 | ||
601 | ||
602 | _llint_op_sub: | |
603 | traceExecution() | |
604 | binaryOp( | |
605 | macro (left, right, slow) bsubio left, right, slow end, | |
606 | macro (left, right) subd left, right end, | |
607 | _llint_slow_path_sub) | |
608 | ||
609 | ||
610 | _llint_op_div: | |
611 | traceExecution() | |
93a37866 A |
612 | if X86_64 |
613 | binaryOpCustomStore( | |
614 | macro (left, right, slow, index) | |
615 | # Assume t3 is scratchable. | |
616 | btiz left, slow | |
617 | bineq left, -1, .notNeg2TwoThe31DivByNeg1 | |
618 | bieq right, -2147483648, .slow | |
619 | .notNeg2TwoThe31DivByNeg1: | |
620 | btinz right, .intOK | |
621 | bilt left, 0, slow | |
622 | .intOK: | |
623 | move left, t3 | |
624 | move right, t0 | |
625 | cdqi | |
626 | idivi t3 | |
627 | btinz t1, slow | |
628 | orq tagTypeNumber, t0 | |
629 | storeq t0, [cfr, index, 8] | |
630 | end, | |
631 | macro (left, right) divd left, right end, | |
632 | _llint_slow_path_div) | |
633 | else | |
634 | callSlowPath(_llint_slow_path_div) | |
635 | dispatch(5) | |
636 | end | |
6fe7ccc8 A |
637 | |
638 | ||
639 | macro bitOp(operation, slowPath, advance) | |
93a37866 A |
640 | loadisFromInstruction(3, t0) |
641 | loadisFromInstruction(2, t2) | |
642 | loadisFromInstruction(1, t3) | |
6fe7ccc8 A |
643 | loadConstantOrVariable(t0, t1) |
644 | loadConstantOrVariable(t2, t0) | |
93a37866 A |
645 | bqb t0, tagTypeNumber, .slow |
646 | bqb t1, tagTypeNumber, .slow | |
6fe7ccc8 | 647 | operation(t1, t0, .slow) |
93a37866 A |
648 | orq tagTypeNumber, t0 |
649 | storeq t0, [cfr, t3, 8] | |
6fe7ccc8 A |
650 | dispatch(advance) |
651 | ||
652 | .slow: | |
653 | callSlowPath(slowPath) | |
654 | dispatch(advance) | |
655 | end | |
656 | ||
657 | _llint_op_lshift: | |
658 | traceExecution() | |
659 | bitOp( | |
660 | macro (left, right, slow) lshifti left, right end, | |
661 | _llint_slow_path_lshift, | |
662 | 4) | |
663 | ||
664 | ||
665 | _llint_op_rshift: | |
666 | traceExecution() | |
667 | bitOp( | |
668 | macro (left, right, slow) rshifti left, right end, | |
669 | _llint_slow_path_rshift, | |
670 | 4) | |
671 | ||
672 | ||
673 | _llint_op_urshift: | |
674 | traceExecution() | |
675 | bitOp( | |
676 | macro (left, right, slow) | |
677 | urshifti left, right | |
678 | bilt right, 0, slow | |
679 | end, | |
680 | _llint_slow_path_urshift, | |
681 | 4) | |
682 | ||
683 | ||
684 | _llint_op_bitand: | |
685 | traceExecution() | |
686 | bitOp( | |
687 | macro (left, right, slow) andi left, right end, | |
688 | _llint_slow_path_bitand, | |
689 | 5) | |
690 | ||
691 | ||
692 | _llint_op_bitxor: | |
693 | traceExecution() | |
694 | bitOp( | |
695 | macro (left, right, slow) xori left, right end, | |
696 | _llint_slow_path_bitxor, | |
697 | 5) | |
698 | ||
699 | ||
700 | _llint_op_bitor: | |
701 | traceExecution() | |
702 | bitOp( | |
703 | macro (left, right, slow) ori left, right end, | |
704 | _llint_slow_path_bitor, | |
705 | 5) | |
706 | ||
707 | ||
708 | _llint_op_check_has_instance: | |
709 | traceExecution() | |
93a37866 | 710 | loadisFromInstruction(3, t1) |
6fe7ccc8 A |
711 | loadConstantOrVariableCell(t1, t0, .opCheckHasInstanceSlow) |
712 | loadp JSCell::m_structure[t0], t0 | |
93a37866 A |
713 | btbz Structure::m_typeInfo + TypeInfo::m_flags[t0], ImplementsDefaultHasInstance, .opCheckHasInstanceSlow |
714 | dispatch(5) | |
6fe7ccc8 A |
715 | |
716 | .opCheckHasInstanceSlow: | |
717 | callSlowPath(_llint_slow_path_check_has_instance) | |
93a37866 | 718 | dispatch(0) |
6fe7ccc8 A |
719 | |
720 | ||
721 | _llint_op_instanceof: | |
722 | traceExecution() | |
6fe7ccc8 | 723 | # Actually do the work. |
93a37866 A |
724 | loadisFromInstruction(3, t0) |
725 | loadisFromInstruction(1, t3) | |
6fe7ccc8 A |
726 | loadConstantOrVariableCell(t0, t1, .opInstanceofSlow) |
727 | loadp JSCell::m_structure[t1], t2 | |
728 | bbb Structure::m_typeInfo + TypeInfo::m_type[t2], ObjectType, .opInstanceofSlow | |
93a37866 | 729 | loadisFromInstruction(2, t0) |
6fe7ccc8 A |
730 | loadConstantOrVariableCell(t0, t2, .opInstanceofSlow) |
731 | ||
732 | # Register state: t1 = prototype, t2 = value | |
733 | move 1, t0 | |
734 | .opInstanceofLoop: | |
735 | loadp JSCell::m_structure[t2], t2 | |
93a37866 A |
736 | loadq Structure::m_prototype[t2], t2 |
737 | bqeq t2, t1, .opInstanceofDone | |
738 | btqz t2, tagMask, .opInstanceofLoop | |
6fe7ccc8 A |
739 | |
740 | move 0, t0 | |
741 | .opInstanceofDone: | |
93a37866 A |
742 | orq ValueFalse, t0 |
743 | storeq t0, [cfr, t3, 8] | |
744 | dispatch(4) | |
6fe7ccc8 A |
745 | |
746 | .opInstanceofSlow: | |
747 | callSlowPath(_llint_slow_path_instanceof) | |
93a37866 | 748 | dispatch(4) |
6fe7ccc8 A |
749 | |
750 | ||
751 | _llint_op_is_undefined: | |
752 | traceExecution() | |
93a37866 A |
753 | loadisFromInstruction(2, t1) |
754 | loadisFromInstruction(1, t2) | |
6fe7ccc8 | 755 | loadConstantOrVariable(t1, t0) |
93a37866 A |
756 | btqz t0, tagMask, .opIsUndefinedCell |
757 | cqeq t0, ValueUndefined, t3 | |
758 | orq ValueFalse, t3 | |
759 | storeq t3, [cfr, t2, 8] | |
6fe7ccc8 A |
760 | dispatch(3) |
761 | .opIsUndefinedCell: | |
762 | loadp JSCell::m_structure[t0], t0 | |
93a37866 A |
763 | btbnz Structure::m_typeInfo + TypeInfo::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined |
764 | move ValueFalse, t1 | |
765 | storeq t1, [cfr, t2, 8] | |
766 | dispatch(3) | |
767 | .masqueradesAsUndefined: | |
768 | loadp CodeBlock[cfr], t1 | |
769 | loadp CodeBlock::m_globalObject[t1], t1 | |
770 | cpeq Structure::m_globalObject[t0], t1, t3 | |
771 | orq ValueFalse, t3 | |
772 | storeq t3, [cfr, t2, 8] | |
6fe7ccc8 A |
773 | dispatch(3) |
774 | ||
775 | ||
776 | _llint_op_is_boolean: | |
777 | traceExecution() | |
93a37866 A |
778 | loadisFromInstruction(2, t1) |
779 | loadisFromInstruction(1, t2) | |
6fe7ccc8 | 780 | loadConstantOrVariable(t1, t0) |
93a37866 A |
781 | xorq ValueFalse, t0 |
782 | tqz t0, ~1, t0 | |
783 | orq ValueFalse, t0 | |
784 | storeq t0, [cfr, t2, 8] | |
6fe7ccc8 A |
785 | dispatch(3) |
786 | ||
787 | ||
788 | _llint_op_is_number: | |
789 | traceExecution() | |
93a37866 A |
790 | loadisFromInstruction(2, t1) |
791 | loadisFromInstruction(1, t2) | |
6fe7ccc8 | 792 | loadConstantOrVariable(t1, t0) |
93a37866 A |
793 | tqnz t0, tagTypeNumber, t1 |
794 | orq ValueFalse, t1 | |
795 | storeq t1, [cfr, t2, 8] | |
6fe7ccc8 A |
796 | dispatch(3) |
797 | ||
798 | ||
799 | _llint_op_is_string: | |
800 | traceExecution() | |
93a37866 A |
801 | loadisFromInstruction(2, t1) |
802 | loadisFromInstruction(1, t2) | |
6fe7ccc8 | 803 | loadConstantOrVariable(t1, t0) |
93a37866 | 804 | btqnz t0, tagMask, .opIsStringNotCell |
6fe7ccc8 A |
805 | loadp JSCell::m_structure[t0], t0 |
806 | cbeq Structure::m_typeInfo + TypeInfo::m_type[t0], StringType, t1 | |
93a37866 A |
807 | orq ValueFalse, t1 |
808 | storeq t1, [cfr, t2, 8] | |
6fe7ccc8 A |
809 | dispatch(3) |
810 | .opIsStringNotCell: | |
93a37866 | 811 | storeq ValueFalse, [cfr, t2, 8] |
6fe7ccc8 A |
812 | dispatch(3) |
813 | ||
814 | ||
93a37866 A |
815 | macro loadPropertyAtVariableOffsetKnownNotInline(propertyOffsetAsPointer, objectAndStorage, value) |
816 | assert(macro (ok) bigteq propertyOffsetAsPointer, firstOutOfLineOffset, ok end) | |
817 | negp propertyOffsetAsPointer | |
818 | loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage | |
819 | loadq (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsPointer, 8], value | |
6fe7ccc8 A |
820 | end |
821 | ||
93a37866 A |
822 | macro loadPropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value) |
823 | bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline | |
824 | loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage | |
825 | negi propertyOffsetAsInt | |
826 | sxi2q propertyOffsetAsInt, propertyOffsetAsInt | |
827 | jmp .ready | |
828 | .isInline: | |
829 | addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage | |
830 | .ready: | |
831 | loadq (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8], value | |
6fe7ccc8 A |
832 | end |
833 | ||
93a37866 | 834 | _llint_op_init_global_const: |
6fe7ccc8 | 835 | traceExecution() |
93a37866 A |
836 | loadisFromInstruction(2, t1) |
837 | loadpFromInstruction(1, t0) | |
838 | loadConstantOrVariable(t1, t2) | |
839 | writeBarrier(t2) | |
840 | storeq t2, [t0] | |
6fe7ccc8 A |
841 | dispatch(5) |
842 | ||
843 | ||
93a37866 | 844 | _llint_op_init_global_const_check: |
6fe7ccc8 | 845 | traceExecution() |
93a37866 A |
846 | loadpFromInstruction(3, t2) |
847 | loadisFromInstruction(2, t1) | |
848 | loadpFromInstruction(1, t0) | |
849 | btbnz [t2], .opInitGlobalConstCheckSlow | |
6fe7ccc8 | 850 | loadConstantOrVariable(t1, t2) |
6fe7ccc8 | 851 | writeBarrier(t2) |
93a37866 A |
852 | storeq t2, [t0] |
853 | dispatch(5) | |
854 | .opInitGlobalConstCheckSlow: | |
855 | callSlowPath(_llint_slow_path_init_global_const_check) | |
856 | dispatch(5) | |
6fe7ccc8 | 857 | |
93a37866 | 858 | macro getById(getPropertyStorage) |
6fe7ccc8 A |
859 | traceExecution() |
860 | # We only do monomorphic get_by_id caching for now, and we do not modify the | |
861 | # opcode. We do, however, allow for the cache to change anytime if fails, since | |
862 | # ping-ponging is free. At best we get lucky and the get_by_id will continue | |
863 | # to take fast path on the new cache. At worst we take slow path, which is what | |
864 | # we would have been doing anyway. | |
93a37866 A |
865 | loadisFromInstruction(2, t0) |
866 | loadpFromInstruction(4, t1) | |
6fe7ccc8 | 867 | loadConstantOrVariableCell(t0, t3, .opGetByIdSlow) |
93a37866 A |
868 | loadisFromInstruction(5, t2) |
869 | getPropertyStorage( | |
870 | t3, | |
871 | t0, | |
872 | macro (propertyStorage, scratch) | |
873 | bpneq JSCell::m_structure[t3], t1, .opGetByIdSlow | |
874 | loadisFromInstruction(1, t1) | |
875 | loadq [propertyStorage, t2], scratch | |
876 | storeq scratch, [cfr, t1, 8] | |
877 | loadpFromInstruction(8, t1) | |
878 | valueProfile(scratch, t1) | |
879 | dispatch(9) | |
880 | end) | |
881 | ||
882 | .opGetByIdSlow: | |
883 | callSlowPath(_llint_slow_path_get_by_id) | |
884 | dispatch(9) | |
885 | end | |
886 | ||
887 | _llint_op_get_by_id: | |
888 | getById(withInlineStorage) | |
889 | ||
890 | ||
891 | _llint_op_get_by_id_out_of_line: | |
892 | getById(withOutOfLineStorage) | |
893 | ||
894 | ||
895 | _llint_op_get_array_length: | |
896 | traceExecution() | |
897 | loadisFromInstruction(2, t0) | |
898 | loadpFromInstruction(4, t1) | |
899 | loadConstantOrVariableCell(t0, t3, .opGetArrayLengthSlow) | |
900 | loadp JSCell::m_structure[t3], t2 | |
901 | arrayProfile(t2, t1, t0) | |
902 | btiz t2, IsArray, .opGetArrayLengthSlow | |
903 | btiz t2, IndexingShapeMask, .opGetArrayLengthSlow | |
904 | loadisFromInstruction(1, t1) | |
905 | loadpFromInstruction(8, t2) | |
906 | loadp JSObject::m_butterfly[t3], t0 | |
907 | loadi -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0], t0 | |
908 | bilt t0, 0, .opGetArrayLengthSlow | |
909 | orq tagTypeNumber, t0 | |
910 | valueProfile(t0, t2) | |
911 | storeq t0, [cfr, t1, 8] | |
6fe7ccc8 A |
912 | dispatch(9) |
913 | ||
93a37866 | 914 | .opGetArrayLengthSlow: |
6fe7ccc8 A |
915 | callSlowPath(_llint_slow_path_get_by_id) |
916 | dispatch(9) | |
917 | ||
918 | ||
919 | _llint_op_get_arguments_length: | |
920 | traceExecution() | |
93a37866 A |
921 | loadisFromInstruction(2, t0) |
922 | loadisFromInstruction(1, t1) | |
923 | btqnz [cfr, t0, 8], .opGetArgumentsLengthSlow | |
6fe7ccc8 A |
924 | loadi ArgumentCount + PayloadOffset[cfr], t2 |
925 | subi 1, t2 | |
93a37866 A |
926 | orq tagTypeNumber, t2 |
927 | storeq t2, [cfr, t1, 8] | |
6fe7ccc8 A |
928 | dispatch(4) |
929 | ||
930 | .opGetArgumentsLengthSlow: | |
931 | callSlowPath(_llint_slow_path_get_arguments_length) | |
932 | dispatch(4) | |
933 | ||
934 | ||
93a37866 | 935 | macro putById(getPropertyStorage) |
6fe7ccc8 | 936 | traceExecution() |
93a37866 A |
937 | loadisFromInstruction(1, t3) |
938 | loadpFromInstruction(4, t1) | |
6fe7ccc8 | 939 | loadConstantOrVariableCell(t3, t0, .opPutByIdSlow) |
93a37866 A |
940 | loadisFromInstruction(3, t2) |
941 | getPropertyStorage( | |
942 | t0, | |
943 | t3, | |
944 | macro (propertyStorage, scratch) | |
945 | bpneq JSCell::m_structure[t0], t1, .opPutByIdSlow | |
946 | loadisFromInstruction(5, t1) | |
947 | loadConstantOrVariable(t2, scratch) | |
948 | writeBarrier(t0) | |
949 | storeq scratch, [propertyStorage, t1] | |
950 | dispatch(9) | |
951 | end) | |
952 | end | |
953 | ||
954 | _llint_op_put_by_id: | |
955 | putById(withInlineStorage) | |
6fe7ccc8 A |
956 | |
957 | .opPutByIdSlow: | |
958 | callSlowPath(_llint_slow_path_put_by_id) | |
959 | dispatch(9) | |
960 | ||
961 | ||
93a37866 A |
962 | _llint_op_put_by_id_out_of_line: |
963 | putById(withOutOfLineStorage) | |
964 | ||
965 | ||
966 | macro putByIdTransition(additionalChecks, getPropertyStorage) | |
6fe7ccc8 | 967 | traceExecution() |
93a37866 A |
968 | loadisFromInstruction(1, t3) |
969 | loadpFromInstruction(4, t1) | |
6fe7ccc8 | 970 | loadConstantOrVariableCell(t3, t0, .opPutByIdSlow) |
93a37866 | 971 | loadisFromInstruction(3, t2) |
6fe7ccc8 | 972 | bpneq JSCell::m_structure[t0], t1, .opPutByIdSlow |
93a37866 A |
973 | additionalChecks(t1, t3) |
974 | loadisFromInstruction(5, t1) | |
975 | getPropertyStorage( | |
976 | t0, | |
977 | t3, | |
978 | macro (propertyStorage, scratch) | |
979 | addp t1, propertyStorage, t3 | |
980 | loadConstantOrVariable(t2, t1) | |
981 | writeBarrier(t1) | |
982 | storeq t1, [t3] | |
983 | loadpFromInstruction(6, t1) | |
984 | storep t1, JSCell::m_structure[t0] | |
985 | dispatch(9) | |
986 | end) | |
987 | end | |
988 | ||
989 | macro noAdditionalChecks(oldStructure, scratch) | |
990 | end | |
991 | ||
992 | macro structureChainChecks(oldStructure, scratch) | |
993 | const protoCell = oldStructure # Reusing the oldStructure register for the proto | |
994 | loadpFromInstruction(7, scratch) | |
995 | assert(macro (ok) btpnz scratch, ok end) | |
996 | loadp StructureChain::m_vector[scratch], scratch | |
997 | assert(macro (ok) btpnz scratch, ok end) | |
998 | bqeq Structure::m_prototype[oldStructure], ValueNull, .done | |
999 | .loop: | |
1000 | loadq Structure::m_prototype[oldStructure], protoCell | |
1001 | loadp JSCell::m_structure[protoCell], oldStructure | |
1002 | bpneq oldStructure, [scratch], .opPutByIdSlow | |
1003 | addp 8, scratch | |
1004 | bqneq Structure::m_prototype[oldStructure], ValueNull, .loop | |
1005 | .done: | |
6fe7ccc8 A |
1006 | end |
1007 | ||
1008 | _llint_op_put_by_id_transition_direct: | |
93a37866 A |
1009 | putByIdTransition(noAdditionalChecks, withInlineStorage) |
1010 | ||
1011 | ||
1012 | _llint_op_put_by_id_transition_direct_out_of_line: | |
1013 | putByIdTransition(noAdditionalChecks, withOutOfLineStorage) | |
6fe7ccc8 A |
1014 | |
1015 | ||
1016 | _llint_op_put_by_id_transition_normal: | |
93a37866 A |
1017 | putByIdTransition(structureChainChecks, withInlineStorage) |
1018 | ||
1019 | ||
1020 | _llint_op_put_by_id_transition_normal_out_of_line: | |
1021 | putByIdTransition(structureChainChecks, withOutOfLineStorage) | |
6fe7ccc8 A |
1022 | |
1023 | ||
1024 | _llint_op_get_by_val: | |
1025 | traceExecution() | |
93a37866 | 1026 | loadisFromInstruction(2, t2) |
6fe7ccc8 | 1027 | loadConstantOrVariableCell(t2, t0, .opGetByValSlow) |
93a37866 A |
1028 | loadp JSCell::m_structure[t0], t2 |
1029 | loadpFromInstruction(4, t3) | |
1030 | arrayProfile(t2, t3, t1) | |
1031 | loadisFromInstruction(3, t3) | |
6fe7ccc8 | 1032 | loadConstantOrVariableInt32(t3, t1, .opGetByValSlow) |
93a37866 A |
1033 | sxi2q t1, t1 |
1034 | loadp JSObject::m_butterfly[t0], t3 | |
1035 | andi IndexingShapeMask, t2 | |
1036 | bieq t2, Int32Shape, .opGetByValIsContiguous | |
1037 | bineq t2, ContiguousShape, .opGetByValNotContiguous | |
1038 | .opGetByValIsContiguous: | |
1039 | ||
1040 | biaeq t1, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t3], .opGetByValOutOfBounds | |
1041 | loadisFromInstruction(1, t0) | |
1042 | loadq [t3, t1, 8], t2 | |
1043 | btqz t2, .opGetByValOutOfBounds | |
1044 | jmp .opGetByValDone | |
1045 | ||
1046 | .opGetByValNotContiguous: | |
1047 | bineq t2, DoubleShape, .opGetByValNotDouble | |
1048 | biaeq t1, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t3], .opGetByValOutOfBounds | |
6fe7ccc8 | 1049 | loadis 8[PB, PC, 8], t0 |
93a37866 A |
1050 | loadd [t3, t1, 8], ft0 |
1051 | bdnequn ft0, ft0, .opGetByValOutOfBounds | |
1052 | fd2q ft0, t2 | |
1053 | subq tagTypeNumber, t2 | |
1054 | jmp .opGetByValDone | |
1055 | ||
1056 | .opGetByValNotDouble: | |
1057 | subi ArrayStorageShape, t2 | |
1058 | bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValSlow | |
1059 | biaeq t1, -sizeof IndexingHeader + IndexingHeader::m_vectorLength[t3], .opGetByValOutOfBounds | |
1060 | loadisFromInstruction(1, t0) | |
1061 | loadq ArrayStorage::m_vector[t3, t1, 8], t2 | |
1062 | btqz t2, .opGetByValOutOfBounds | |
1063 | ||
1064 | .opGetByValDone: | |
1065 | storeq t2, [cfr, t0, 8] | |
1066 | loadpFromInstruction(5, t0) | |
6fe7ccc8 | 1067 | valueProfile(t2, t0) |
93a37866 | 1068 | dispatch(6) |
6fe7ccc8 | 1069 | |
93a37866 A |
1070 | .opGetByValOutOfBounds: |
1071 | if VALUE_PROFILER | |
1072 | loadpFromInstruction(4, t0) | |
1073 | storeb 1, ArrayProfile::m_outOfBounds[t0] | |
1074 | end | |
6fe7ccc8 A |
1075 | .opGetByValSlow: |
1076 | callSlowPath(_llint_slow_path_get_by_val) | |
93a37866 | 1077 | dispatch(6) |
6fe7ccc8 A |
1078 | |
1079 | ||
1080 | _llint_op_get_argument_by_val: | |
93a37866 A |
1081 | # FIXME: At some point we should array profile this. Right now it isn't necessary |
1082 | # since the DFG will never turn a get_argument_by_val into a GetByVal. | |
6fe7ccc8 | 1083 | traceExecution() |
93a37866 A |
1084 | loadisFromInstruction(2, t0) |
1085 | loadisFromInstruction(3, t1) | |
1086 | btqnz [cfr, t0, 8], .opGetArgumentByValSlow | |
6fe7ccc8 A |
1087 | loadConstantOrVariableInt32(t1, t2, .opGetArgumentByValSlow) |
1088 | addi 1, t2 | |
1089 | loadi ArgumentCount + PayloadOffset[cfr], t1 | |
1090 | biaeq t2, t1, .opGetArgumentByValSlow | |
1091 | negi t2 | |
93a37866 A |
1092 | sxi2q t2, t2 |
1093 | loadisFromInstruction(1, t3) | |
1094 | loadpFromInstruction(5, t1) | |
1095 | loadq ThisArgumentOffset[cfr, t2, 8], t0 | |
1096 | storeq t0, [cfr, t3, 8] | |
1097 | valueProfile(t0, t1) | |
1098 | dispatch(6) | |
6fe7ccc8 A |
1099 | |
1100 | .opGetArgumentByValSlow: | |
1101 | callSlowPath(_llint_slow_path_get_argument_by_val) | |
93a37866 | 1102 | dispatch(6) |
6fe7ccc8 A |
1103 | |
1104 | ||
1105 | _llint_op_get_by_pname: | |
1106 | traceExecution() | |
93a37866 | 1107 | loadisFromInstruction(3, t1) |
6fe7ccc8 | 1108 | loadConstantOrVariable(t1, t0) |
93a37866 | 1109 | loadisFromInstruction(4, t1) |
6fe7ccc8 | 1110 | assertNotConstant(t1) |
93a37866 A |
1111 | bqneq t0, [cfr, t1, 8], .opGetByPnameSlow |
1112 | loadisFromInstruction(2, t2) | |
1113 | loadisFromInstruction(5, t3) | |
6fe7ccc8 A |
1114 | loadConstantOrVariableCell(t2, t0, .opGetByPnameSlow) |
1115 | assertNotConstant(t3) | |
93a37866 | 1116 | loadq [cfr, t3, 8], t1 |
6fe7ccc8 A |
1117 | loadp JSCell::m_structure[t0], t2 |
1118 | bpneq t2, JSPropertyNameIterator::m_cachedStructure[t1], .opGetByPnameSlow | |
93a37866 | 1119 | loadisFromInstruction(6, t3) |
6fe7ccc8 A |
1120 | loadi PayloadOffset[cfr, t3, 8], t3 |
1121 | subi 1, t3 | |
1122 | biaeq t3, JSPropertyNameIterator::m_numCacheableSlots[t1], .opGetByPnameSlow | |
93a37866 A |
1123 | bilt t3, JSPropertyNameIterator::m_cachedStructureInlineCapacity[t1], .opGetByPnameInlineProperty |
1124 | addi firstOutOfLineOffset, t3 | |
1125 | subi JSPropertyNameIterator::m_cachedStructureInlineCapacity[t1], t3 | |
1126 | .opGetByPnameInlineProperty: | |
1127 | loadPropertyAtVariableOffset(t3, t0, t0) | |
1128 | loadisFromInstruction(1, t1) | |
1129 | storeq t0, [cfr, t1, 8] | |
6fe7ccc8 A |
1130 | dispatch(7) |
1131 | ||
1132 | .opGetByPnameSlow: | |
1133 | callSlowPath(_llint_slow_path_get_by_pname) | |
1134 | dispatch(7) | |
1135 | ||
1136 | ||
93a37866 A |
1137 | macro contiguousPutByVal(storeCallback) |
1138 | biaeq t3, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0], .outOfBounds | |
1139 | .storeResult: | |
1140 | loadisFromInstruction(3, t2) | |
1141 | storeCallback(t2, t1, [t0, t3, 8]) | |
1142 | dispatch(5) | |
1143 | ||
1144 | .outOfBounds: | |
1145 | biaeq t3, -sizeof IndexingHeader + IndexingHeader::m_vectorLength[t0], .opPutByValOutOfBounds | |
1146 | if VALUE_PROFILER | |
1147 | loadp 32[PB, PC, 8], t2 | |
1148 | storeb 1, ArrayProfile::m_mayStoreToHole[t2] | |
1149 | end | |
1150 | addi 1, t3, t2 | |
1151 | storei t2, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0] | |
1152 | jmp .storeResult | |
1153 | end | |
1154 | ||
6fe7ccc8 A |
1155 | _llint_op_put_by_val: |
1156 | traceExecution() | |
93a37866 | 1157 | loadisFromInstruction(1, t0) |
6fe7ccc8 | 1158 | loadConstantOrVariableCell(t0, t1, .opPutByValSlow) |
93a37866 A |
1159 | loadp JSCell::m_structure[t1], t2 |
1160 | loadpFromInstruction(4, t3) | |
1161 | arrayProfile(t2, t3, t0) | |
1162 | loadisFromInstruction(2, t0) | |
1163 | loadConstantOrVariableInt32(t0, t3, .opPutByValSlow) | |
1164 | sxi2q t3, t3 | |
1165 | loadp JSObject::m_butterfly[t1], t0 | |
1166 | andi IndexingShapeMask, t2 | |
1167 | bineq t2, Int32Shape, .opPutByValNotInt32 | |
1168 | contiguousPutByVal( | |
1169 | macro (operand, scratch, address) | |
1170 | loadConstantOrVariable(operand, scratch) | |
1171 | bpb scratch, tagTypeNumber, .opPutByValSlow | |
1172 | storep scratch, address | |
1173 | end) | |
1174 | ||
1175 | .opPutByValNotInt32: | |
1176 | bineq t2, DoubleShape, .opPutByValNotDouble | |
1177 | contiguousPutByVal( | |
1178 | macro (operand, scratch, address) | |
1179 | loadConstantOrVariable(operand, scratch) | |
1180 | bqb scratch, tagTypeNumber, .notInt | |
1181 | ci2d scratch, ft0 | |
1182 | jmp .ready | |
1183 | .notInt: | |
1184 | addp tagTypeNumber, scratch | |
1185 | fq2d scratch, ft0 | |
1186 | bdnequn ft0, ft0, .opPutByValSlow | |
1187 | .ready: | |
1188 | stored ft0, address | |
1189 | end) | |
1190 | ||
1191 | .opPutByValNotDouble: | |
1192 | bineq t2, ContiguousShape, .opPutByValNotContiguous | |
1193 | contiguousPutByVal( | |
1194 | macro (operand, scratch, address) | |
1195 | loadConstantOrVariable(operand, scratch) | |
1196 | writeBarrier(scratch) | |
1197 | storep scratch, address | |
1198 | end) | |
1199 | ||
1200 | .opPutByValNotContiguous: | |
1201 | bineq t2, ArrayStorageShape, .opPutByValSlow | |
1202 | biaeq t3, -sizeof IndexingHeader + IndexingHeader::m_vectorLength[t0], .opPutByValOutOfBounds | |
1203 | btqz ArrayStorage::m_vector[t0, t3, 8], .opPutByValArrayStorageEmpty | |
1204 | .opPutByValArrayStorageStoreResult: | |
1205 | loadisFromInstruction(3, t2) | |
1206 | loadConstantOrVariable(t2, t1) | |
6fe7ccc8 | 1207 | writeBarrier(t1) |
93a37866 A |
1208 | storeq t1, ArrayStorage::m_vector[t0, t3, 8] |
1209 | dispatch(5) | |
6fe7ccc8 | 1210 | |
93a37866 A |
1211 | .opPutByValArrayStorageEmpty: |
1212 | if VALUE_PROFILER | |
1213 | loadpFromInstruction(4, t1) | |
1214 | storeb 1, ArrayProfile::m_mayStoreToHole[t1] | |
1215 | end | |
6fe7ccc8 | 1216 | addi 1, ArrayStorage::m_numValuesInVector[t0] |
93a37866 A |
1217 | bib t3, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0], .opPutByValArrayStorageStoreResult |
1218 | addi 1, t3, t1 | |
1219 | storei t1, -sizeof IndexingHeader + IndexingHeader::m_publicLength[t0] | |
1220 | jmp .opPutByValArrayStorageStoreResult | |
6fe7ccc8 | 1221 | |
93a37866 A |
1222 | .opPutByValOutOfBounds: |
1223 | if VALUE_PROFILER | |
1224 | loadpFromInstruction(4, t0) | |
1225 | storeb 1, ArrayProfile::m_outOfBounds[t0] | |
1226 | end | |
6fe7ccc8 A |
1227 | .opPutByValSlow: |
1228 | callSlowPath(_llint_slow_path_put_by_val) | |
93a37866 | 1229 | dispatch(5) |
6fe7ccc8 A |
1230 | |
1231 | ||
6fe7ccc8 A |
1232 | _llint_op_jmp: |
1233 | traceExecution() | |
93a37866 | 1234 | dispatchIntIndirect(1) |
6fe7ccc8 A |
1235 | |
1236 | ||
1237 | macro jumpTrueOrFalse(conditionOp, slow) | |
93a37866 | 1238 | loadisFromInstruction(1, t1) |
6fe7ccc8 | 1239 | loadConstantOrVariable(t1, t0) |
93a37866 A |
1240 | xorq ValueFalse, t0 |
1241 | btqnz t0, -1, .slow | |
6fe7ccc8 A |
1242 | conditionOp(t0, .target) |
1243 | dispatch(3) | |
1244 | ||
1245 | .target: | |
93a37866 | 1246 | dispatchIntIndirect(2) |
6fe7ccc8 A |
1247 | |
1248 | .slow: | |
1249 | callSlowPath(slow) | |
1250 | dispatch(0) | |
1251 | end | |
1252 | ||
1253 | ||
1254 | macro equalNull(cellHandler, immediateHandler) | |
93a37866 | 1255 | loadisFromInstruction(1, t0) |
6fe7ccc8 | 1256 | assertNotConstant(t0) |
93a37866 A |
1257 | loadq [cfr, t0, 8], t0 |
1258 | btqnz t0, tagMask, .immediate | |
6fe7ccc8 | 1259 | loadp JSCell::m_structure[t0], t2 |
93a37866 | 1260 | cellHandler(t2, Structure::m_typeInfo + TypeInfo::m_flags[t2], .target) |
6fe7ccc8 A |
1261 | dispatch(3) |
1262 | ||
1263 | .target: | |
93a37866 | 1264 | dispatchIntIndirect(2) |
6fe7ccc8 A |
1265 | |
1266 | .immediate: | |
93a37866 | 1267 | andq ~TagBitUndefined, t0 |
6fe7ccc8 A |
1268 | immediateHandler(t0, .target) |
1269 | dispatch(3) | |
1270 | end | |
1271 | ||
1272 | _llint_op_jeq_null: | |
1273 | traceExecution() | |
1274 | equalNull( | |
93a37866 A |
1275 | macro (structure, value, target) |
1276 | btbz value, MasqueradesAsUndefined, .notMasqueradesAsUndefined | |
1277 | loadp CodeBlock[cfr], t0 | |
1278 | loadp CodeBlock::m_globalObject[t0], t0 | |
1279 | bpeq Structure::m_globalObject[structure], t0, target | |
1280 | .notMasqueradesAsUndefined: | |
1281 | end, | |
1282 | macro (value, target) bqeq value, ValueNull, target end) | |
6fe7ccc8 A |
1283 | |
1284 | ||
1285 | _llint_op_jneq_null: | |
1286 | traceExecution() | |
1287 | equalNull( | |
93a37866 A |
1288 | macro (structure, value, target) |
1289 | btbz value, MasqueradesAsUndefined, target | |
1290 | loadp CodeBlock[cfr], t0 | |
1291 | loadp CodeBlock::m_globalObject[t0], t0 | |
1292 | bpneq Structure::m_globalObject[structure], t0, target | |
1293 | end, | |
1294 | macro (value, target) bqneq value, ValueNull, target end) | |
6fe7ccc8 A |
1295 | |
1296 | ||
1297 | _llint_op_jneq_ptr: | |
1298 | traceExecution() | |
93a37866 A |
1299 | loadisFromInstruction(1, t0) |
1300 | loadisFromInstruction(2, t1) | |
1301 | loadp CodeBlock[cfr], t2 | |
1302 | loadp CodeBlock::m_globalObject[t2], t2 | |
1303 | loadp JSGlobalObject::m_specialPointers[t2, t1, 8], t1 | |
6fe7ccc8 A |
1304 | bpneq t1, [cfr, t0, 8], .opJneqPtrTarget |
1305 | dispatch(4) | |
1306 | ||
1307 | .opJneqPtrTarget: | |
93a37866 | 1308 | dispatchIntIndirect(3) |
6fe7ccc8 A |
1309 | |
1310 | ||
1311 | macro compare(integerCompare, doubleCompare, slowPath) | |
93a37866 A |
1312 | loadisFromInstruction(1, t2) |
1313 | loadisFromInstruction(2, t3) | |
6fe7ccc8 A |
1314 | loadConstantOrVariable(t2, t0) |
1315 | loadConstantOrVariable(t3, t1) | |
93a37866 A |
1316 | bqb t0, tagTypeNumber, .op1NotInt |
1317 | bqb t1, tagTypeNumber, .op2NotInt | |
6fe7ccc8 A |
1318 | integerCompare(t0, t1, .jumpTarget) |
1319 | dispatch(4) | |
1320 | ||
1321 | .op1NotInt: | |
93a37866 A |
1322 | btqz t0, tagTypeNumber, .slow |
1323 | bqb t1, tagTypeNumber, .op1NotIntOp2NotInt | |
6fe7ccc8 A |
1324 | ci2d t1, ft1 |
1325 | jmp .op1NotIntReady | |
1326 | .op1NotIntOp2NotInt: | |
93a37866 A |
1327 | btqz t1, tagTypeNumber, .slow |
1328 | addq tagTypeNumber, t1 | |
1329 | fq2d t1, ft1 | |
6fe7ccc8 | 1330 | .op1NotIntReady: |
93a37866 A |
1331 | addq tagTypeNumber, t0 |
1332 | fq2d t0, ft0 | |
6fe7ccc8 A |
1333 | doubleCompare(ft0, ft1, .jumpTarget) |
1334 | dispatch(4) | |
1335 | ||
1336 | .op2NotInt: | |
1337 | ci2d t0, ft0 | |
93a37866 A |
1338 | btqz t1, tagTypeNumber, .slow |
1339 | addq tagTypeNumber, t1 | |
1340 | fq2d t1, ft1 | |
6fe7ccc8 A |
1341 | doubleCompare(ft0, ft1, .jumpTarget) |
1342 | dispatch(4) | |
1343 | ||
1344 | .jumpTarget: | |
93a37866 | 1345 | dispatchIntIndirect(3) |
6fe7ccc8 A |
1346 | |
1347 | .slow: | |
1348 | callSlowPath(slowPath) | |
1349 | dispatch(0) | |
1350 | end | |
1351 | ||
1352 | ||
1353 | _llint_op_switch_imm: | |
1354 | traceExecution() | |
93a37866 A |
1355 | loadisFromInstruction(3, t2) |
1356 | loadisFromInstruction(1, t3) | |
6fe7ccc8 A |
1357 | loadConstantOrVariable(t2, t1) |
1358 | loadp CodeBlock[cfr], t2 | |
1359 | loadp CodeBlock::m_rareData[t2], t2 | |
1360 | muli sizeof SimpleJumpTable, t3 # FIXME: would be nice to peephole this! | |
1361 | loadp CodeBlock::RareData::m_immediateSwitchJumpTables + VectorBufferOffset[t2], t2 | |
1362 | addp t3, t2 | |
93a37866 | 1363 | bqb t1, tagTypeNumber, .opSwitchImmNotInt |
6fe7ccc8 A |
1364 | subi SimpleJumpTable::min[t2], t1 |
1365 | biaeq t1, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough | |
1366 | loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3 | |
1367 | loadis [t3, t1, 4], t1 | |
1368 | btiz t1, .opSwitchImmFallThrough | |
1369 | dispatch(t1) | |
1370 | ||
1371 | .opSwitchImmNotInt: | |
93a37866 | 1372 | btqnz t1, tagTypeNumber, .opSwitchImmSlow # Go slow if it's a double. |
6fe7ccc8 | 1373 | .opSwitchImmFallThrough: |
93a37866 | 1374 | dispatchIntIndirect(2) |
6fe7ccc8 A |
1375 | |
1376 | .opSwitchImmSlow: | |
1377 | callSlowPath(_llint_slow_path_switch_imm) | |
1378 | dispatch(0) | |
1379 | ||
1380 | ||
1381 | _llint_op_switch_char: | |
1382 | traceExecution() | |
93a37866 A |
1383 | loadisFromInstruction(3, t2) |
1384 | loadisFromInstruction(1, t3) | |
6fe7ccc8 A |
1385 | loadConstantOrVariable(t2, t1) |
1386 | loadp CodeBlock[cfr], t2 | |
1387 | loadp CodeBlock::m_rareData[t2], t2 | |
1388 | muli sizeof SimpleJumpTable, t3 | |
1389 | loadp CodeBlock::RareData::m_characterSwitchJumpTables + VectorBufferOffset[t2], t2 | |
1390 | addp t3, t2 | |
93a37866 | 1391 | btqnz t1, tagMask, .opSwitchCharFallThrough |
6fe7ccc8 A |
1392 | loadp JSCell::m_structure[t1], t0 |
1393 | bbneq Structure::m_typeInfo + TypeInfo::m_type[t0], StringType, .opSwitchCharFallThrough | |
1394 | bineq JSString::m_length[t1], 1, .opSwitchCharFallThrough | |
1395 | loadp JSString::m_value[t1], t0 | |
1396 | btpz t0, .opSwitchOnRope | |
1397 | loadp StringImpl::m_data8[t0], t1 | |
1398 | btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit | |
1399 | loadh [t1], t0 | |
1400 | jmp .opSwitchCharReady | |
1401 | .opSwitchChar8Bit: | |
1402 | loadb [t1], t0 | |
1403 | .opSwitchCharReady: | |
1404 | subi SimpleJumpTable::min[t2], t0 | |
1405 | biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough | |
1406 | loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2 | |
1407 | loadis [t2, t0, 4], t1 | |
1408 | btiz t1, .opSwitchCharFallThrough | |
1409 | dispatch(t1) | |
1410 | ||
1411 | .opSwitchCharFallThrough: | |
93a37866 | 1412 | dispatchIntIndirect(2) |
6fe7ccc8 A |
1413 | |
1414 | .opSwitchOnRope: | |
1415 | callSlowPath(_llint_slow_path_switch_char) | |
1416 | dispatch(0) | |
1417 | ||
1418 | ||
1419 | _llint_op_new_func: | |
1420 | traceExecution() | |
93a37866 A |
1421 | loadisFromInstruction(3, t2) |
1422 | btiz t2, .opNewFuncUnchecked | |
1423 | loadisFromInstruction(1, t1) | |
1424 | btqnz [cfr, t1, 8], .opNewFuncDone | |
6fe7ccc8 A |
1425 | .opNewFuncUnchecked: |
1426 | callSlowPath(_llint_slow_path_new_func) | |
1427 | .opNewFuncDone: | |
1428 | dispatch(4) | |
1429 | ||
1430 | ||
93a37866 A |
1431 | macro arrayProfileForCall() |
1432 | if VALUE_PROFILER | |
1433 | loadisFromInstruction(3, t3) | |
1434 | loadq ThisArgumentOffset[cfr, t3, 8], t0 | |
1435 | btqnz t0, tagMask, .done | |
1436 | loadp JSCell::m_structure[t0], t0 | |
1437 | loadpFromInstruction(5, t1) | |
1438 | storep t0, ArrayProfile::m_lastSeenStructure[t1] | |
1439 | .done: | |
1440 | end | |
1441 | end | |
1442 | ||
6fe7ccc8 | 1443 | macro doCall(slowPath) |
93a37866 A |
1444 | loadisFromInstruction(1, t0) |
1445 | loadpFromInstruction(4, t1) | |
6fe7ccc8 A |
1446 | loadp LLIntCallLinkInfo::callee[t1], t2 |
1447 | loadConstantOrVariable(t0, t3) | |
93a37866 A |
1448 | bqneq t3, t2, .opCallSlow |
1449 | loadisFromInstruction(3, t3) | |
6fe7ccc8 A |
1450 | addi 6, PC |
1451 | lshifti 3, t3 | |
1452 | addp cfr, t3 | |
93a37866 A |
1453 | loadp JSFunction::m_scope[t2], t0 |
1454 | storeq t2, Callee[t3] | |
1455 | storeq t0, ScopeChain[t3] | |
1456 | loadisFromInstruction(-4, t2) | |
6fe7ccc8 | 1457 | storei PC, ArgumentCount + TagOffset[cfr] |
93a37866 | 1458 | storeq cfr, CallerFrame[t3] |
6fe7ccc8 A |
1459 | storei t2, ArgumentCount + PayloadOffset[t3] |
1460 | move t3, cfr | |
93a37866 | 1461 | callTargetFunction(t1) |
6fe7ccc8 A |
1462 | |
1463 | .opCallSlow: | |
1464 | slowPathForCall(6, slowPath) | |
1465 | end | |
1466 | ||
1467 | ||
1468 | _llint_op_tear_off_activation: | |
1469 | traceExecution() | |
93a37866 A |
1470 | loadisFromInstruction(1, t0) |
1471 | btqz [cfr, t0, 8], .opTearOffActivationNotCreated | |
6fe7ccc8 A |
1472 | callSlowPath(_llint_slow_path_tear_off_activation) |
1473 | .opTearOffActivationNotCreated: | |
93a37866 | 1474 | dispatch(2) |
6fe7ccc8 A |
1475 | |
1476 | ||
1477 | _llint_op_tear_off_arguments: | |
1478 | traceExecution() | |
93a37866 | 1479 | loadisFromInstruction(1, t0) |
6fe7ccc8 | 1480 | subi 1, t0 # Get the unmodifiedArgumentsRegister |
93a37866 | 1481 | btqz [cfr, t0, 8], .opTearOffArgumentsNotCreated |
6fe7ccc8 A |
1482 | callSlowPath(_llint_slow_path_tear_off_arguments) |
1483 | .opTearOffArgumentsNotCreated: | |
93a37866 | 1484 | dispatch(3) |
6fe7ccc8 A |
1485 | |
1486 | ||
1487 | _llint_op_ret: | |
1488 | traceExecution() | |
1489 | checkSwitchToJITForEpilogue() | |
93a37866 | 1490 | loadisFromInstruction(1, t2) |
6fe7ccc8 A |
1491 | loadConstantOrVariable(t2, t0) |
1492 | doReturn() | |
1493 | ||
1494 | ||
1495 | _llint_op_call_put_result: | |
93a37866 A |
1496 | loadisFromInstruction(1, t2) |
1497 | loadpFromInstruction(2, t3) | |
1498 | storeq t0, [cfr, t2, 8] | |
6fe7ccc8 A |
1499 | valueProfile(t0, t3) |
1500 | traceExecution() | |
1501 | dispatch(3) | |
1502 | ||
1503 | ||
1504 | _llint_op_ret_object_or_this: | |
1505 | traceExecution() | |
1506 | checkSwitchToJITForEpilogue() | |
93a37866 | 1507 | loadisFromInstruction(1, t2) |
6fe7ccc8 | 1508 | loadConstantOrVariable(t2, t0) |
93a37866 | 1509 | btqnz t0, tagMask, .opRetObjectOrThisNotObject |
6fe7ccc8 A |
1510 | loadp JSCell::m_structure[t0], t2 |
1511 | bbb Structure::m_typeInfo + TypeInfo::m_type[t2], ObjectType, .opRetObjectOrThisNotObject | |
1512 | doReturn() | |
1513 | ||
1514 | .opRetObjectOrThisNotObject: | |
93a37866 | 1515 | loadisFromInstruction(2, t2) |
6fe7ccc8 A |
1516 | loadConstantOrVariable(t2, t0) |
1517 | doReturn() | |
1518 | ||
1519 | ||
1520 | _llint_op_to_primitive: | |
1521 | traceExecution() | |
93a37866 A |
1522 | loadisFromInstruction(2, t2) |
1523 | loadisFromInstruction(1, t3) | |
6fe7ccc8 | 1524 | loadConstantOrVariable(t2, t0) |
93a37866 | 1525 | btqnz t0, tagMask, .opToPrimitiveIsImm |
6fe7ccc8 A |
1526 | loadp JSCell::m_structure[t0], t2 |
1527 | bbneq Structure::m_typeInfo + TypeInfo::m_type[t2], StringType, .opToPrimitiveSlowCase | |
1528 | .opToPrimitiveIsImm: | |
93a37866 | 1529 | storeq t0, [cfr, t3, 8] |
6fe7ccc8 A |
1530 | dispatch(3) |
1531 | ||
1532 | .opToPrimitiveSlowCase: | |
1533 | callSlowPath(_llint_slow_path_to_primitive) | |
1534 | dispatch(3) | |
1535 | ||
1536 | ||
_llint_op_next_pname:
    traceExecution()
    loadisFromInstruction(3, t1)
    loadisFromInstruction(4, t2)
    assertNotConstant(t1)
    assertNotConstant(t2)
    loadi PayloadOffset[cfr, t1, 8], t0
    bieq t0, PayloadOffset[cfr, t2, 8], .opNextPnameEnd
    loadisFromInstruction(5, t2)
    assertNotConstant(t2)
    loadp [cfr, t2, 8], t2
    loadp JSPropertyNameIterator::m_jsStrings[t2], t3
    loadq [t3, t0, 8], t3
    addi 1, t0
    storei t0, PayloadOffset[cfr, t1, 8]
    loadisFromInstruction(1, t1)
    storeq t3, [cfr, t1, 8]
    loadisFromInstruction(2, t3)
    assertNotConstant(t3)
    loadq [cfr, t3, 8], t3
    loadp JSCell::m_structure[t3], t1
    bpneq t1, JSPropertyNameIterator::m_cachedStructure[t2], .opNextPnameSlow
    loadp JSPropertyNameIterator::m_cachedPrototypeChain[t2], t0
    loadp StructureChain::m_vector[t0], t0
    btpz [t0], .opNextPnameTarget
.opNextPnameCheckPrototypeLoop:
    bqeq Structure::m_prototype[t1], ValueNull, .opNextPnameSlow
    loadq Structure::m_prototype[t1], t2
    loadp JSCell::m_structure[t2], t1
    bpneq t1, [t0], .opNextPnameSlow
    addp 8, t0
    btpnz [t0], .opNextPnameCheckPrototypeLoop
.opNextPnameTarget:
    dispatchIntIndirect(6)

.opNextPnameEnd:
    dispatch(7)

.opNextPnameSlow:
    callSlowPath(_llint_slow_path_next_pname) # This either keeps the PC where it was (causing us to loop) or sets it to target.
    dispatch(0)


_llint_op_catch:
    # This is where we end up from the JIT's throw trampoline (because the
    # machine code return address will be set to _llint_op_catch), and from
    # the interpreter's throw trampoline (see _llint_throw_trampoline).
    # The JIT throwing protocol calls for the cfr to be in t0. The throwing
    # code must have known that we were throwing to the interpreter, and have
    # set VM::targetInterpreterPCForThrow.
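    # To recover the bytecode PC we take the raw pointer in targetInterpreterPCForThrow,
    # subtract the instruction base PB, and shift right by 3 to turn it into an index into the
    # 8-byte instruction slots. The pending exception is then moved out of VM::exception into
    # the operand-1 register, and the exception slot is cleared.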
    move t0, cfr
    loadp CodeBlock[cfr], PB
    loadp CodeBlock::m_instructions[PB], PB
    loadp JITStackFrame::vm[sp], t3
    loadp VM::targetInterpreterPCForThrow[t3], PC
    subp PB, PC
    rshiftp 3, PC
    loadq VM::exception[t3], t0
    storeq 0, VM::exception[t3]
    loadisFromInstruction(1, t2)
    storeq t0, [cfr, t2, 8]
    traceExecution()
    dispatch(2)


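# Terminates the current code block (presumably program or eval code, since functions return
# via op_ret) with the value in operand 1, which is always a local, hence the assert below.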
_llint_op_end:
    traceExecution()
    checkSwitchToJITForEpilogue()
    loadisFromInstruction(1, t0)
    assertNotConstant(t0)
    loadq [cfr, t0, 8], t0
    doReturn()


_llint_throw_from_slow_path_trampoline:
    # We come here when throwing from the interpreter (i.e. from LLIntSlowPaths), where the
    # throw target is not necessarily interpreted code. This essentially emulates the JIT's
    # throwing protocol.
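    # VM::callFrameForThrow and VM::targetMachinePCForThrow are presumed to have been set by
    # the slow path's exception machinery to the handler's call frame and machine-code address;
    # all we do here is reload them and jump.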
    loadp JITStackFrame::vm[sp], t1
    loadp VM::callFrameForThrow[t1], t0
    jmp VM::targetMachinePCForThrow[t1]


_llint_throw_during_call_trampoline:
    preserveReturnAddressAfterCall(t2)
    loadp JITStackFrame::vm[sp], t1
    loadp VM::callFrameForThrow[t1], t0
    jmp VM::targetMachinePCForThrow[t1]

# Gives you the scope in t0, while allowing you to optionally perform additional checks on the
# scopes as they are traversed. scopeCheck() is called with two arguments: the register
# holding the scope, and a register that can be used for scratch. Note that this does not
# use t3, so you can hold stuff in t3 if need be.
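# The De Bruijn index counts how many links of the scope chain to walk from the current frame's
# ScopeChain head. When this is function code that needs an activation, the index appears to be
# computed as if the activation scope were always present, so when the activation has not yet
# been created the first count is consumed without hopping a scope (see .noActivation below).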
macro getDeBruijnScope(deBruijinIndexOperand, scopeCheck)
    loadp ScopeChain[cfr], t0
    loadis deBruijinIndexOperand, t2

    btiz t2, .done

    loadp CodeBlock[cfr], t1
    bineq CodeBlock::m_codeType[t1], FunctionCode, .loop
    btbz CodeBlock::m_needsActivation[t1], .loop

    loadis CodeBlock::m_activationRegister[t1], t1

    # Need to conditionally skip over one scope.
    btpz [cfr, t1, 8], .noActivation
    scopeCheck(t0, t1)
    loadp JSScope::m_next[t0], t0
.noActivation:
    subi 1, t2

    btiz t2, .done
.loop:
    scopeCheck(t0, t1)
    loadp JSScope::m_next[t0], t0
    subi 1, t2
    btinz t2, .loop

.done:
end

_llint_op_get_scoped_var:
    traceExecution()
    # Operands are as follows:
    # pc[1]: Destination for the load.
    # pc[2]: Index of register in the scope.
    # 24[PB, PC, 8]: De Bruijn index.
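    # 32[PB, PC, 8]: presumably the ValueProfile for the loaded value (see valueProfile below).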
    getDeBruijnScope(24[PB, PC, 8], macro (scope, scratch) end)
    loadisFromInstruction(1, t1)
    loadisFromInstruction(2, t2)

    loadp JSVariableObject::m_registers[t0], t0
    loadp [t0, t2, 8], t3
    storep t3, [cfr, t1, 8]
    loadp 32[PB, PC, 8], t1
    valueProfile(t3, t1)
    dispatch(5)


_llint_op_put_scoped_var:
    traceExecution()
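    # Operand layout, mirroring op_get_scoped_var above (inferred from the loads below):
    # 8[PB, PC, 8]: index of the register in the scope to store into.
    # 16[PB, PC, 8]: De Bruijn index.
    # 24[PB, PC, 8]: source value (constant or variable).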
    getDeBruijnScope(16[PB, PC, 8], macro (scope, scratch) end)
    loadis 24[PB, PC, 8], t1
    loadConstantOrVariable(t1, t3)
    loadis 8[PB, PC, 8], t1
    writeBarrier(t3)
    loadp JSVariableObject::m_registers[t0], t0
    storep t3, [t0, t1, 8]
    dispatch(4)

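# Trampoline for calls into host (native) functions. It stores a null CodeBlock into the frame
# (native frames have none), records the frame in VM::topCallFrame, copies the caller's
# ScopeChain and the return PC into the frame header, and then calls the C function found at
# the given offset inside the callee's executable (per-architecture details below). On return,
# a non-zero VM::exception is routed through the interpreter's throw trampoline instead of
# returning normally.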
macro nativeCallTrampoline(executableOffsetToFunction)
    storep 0, CodeBlock[cfr]
    if X86_64
        loadp JITStackFrame::vm + 8[sp], t0
        storep cfr, VM::topCallFrame[t0]
        loadp CallerFrame[cfr], t0
        loadq ScopeChain[t0], t1
        storeq t1, ScopeChain[cfr]
        peek 0, t1
        storep t1, ReturnPC[cfr]
        move cfr, t5 # t5 = rdi, so arg #1
        subp 16 - 8, sp
        loadp Callee[cfr], t4 # t4 = rsi, so arg #2
        loadp JSFunction::m_executable[t4], t1
        move t0, cfr # Restore cfr to avoid loading from stack
        call executableOffsetToFunction[t1]
        addp 16 - 8, sp
        loadp JITStackFrame::vm + 8[sp], t3
    elsif ARM64
        loadp JITStackFrame::vm[sp], t0
        storep cfr, VM::topCallFrame[t0]
        loadp CallerFrame[cfr], t2
        loadp ScopeChain[t2], t1
        storep t1, ScopeChain[cfr]
        preserveReturnAddressAfterCall(t3)
        storep t3, ReturnPC[cfr]
        move cfr, t0
        loadp Callee[cfr], t1
        loadp JSFunction::m_executable[t1], t1
        move t2, cfr # Restore cfr to avoid loading from stack
        call executableOffsetToFunction[t1]
        restoreReturnAddressBeforeReturn(t3)
        loadp JITStackFrame::vm[sp], t3
    elsif C_LOOP
        loadp CallerFrame[cfr], t0
        loadp ScopeChain[t0], t1
        storep t1, ScopeChain[cfr]

        loadp JITStackFrame::vm[sp], t3
        storep cfr, VM::topCallFrame[t3]

        move t0, t2
        preserveReturnAddressAfterCall(t3)
        storep t3, ReturnPC[cfr]
        move cfr, t0
        loadp Callee[cfr], t1
        loadp JSFunction::m_executable[t1], t1
        move t2, cfr
        cloopCallNative executableOffsetToFunction[t1]

        restoreReturnAddressBeforeReturn(t3)
        loadp JITStackFrame::vm[sp], t3
    else
        error
    end

    btqnz VM::exception[t3], .exception
    ret
.exception:
    preserveReturnAddressAfterCall(t1) # This is really only needed on X86_64
    loadi ArgumentCount + TagOffset[cfr], PC
    loadp CodeBlock[cfr], PB
    loadp CodeBlock::m_instructions[PB], PB
    loadp JITStackFrame::vm[sp], t0
    storep cfr, VM::topCallFrame[t0]
    callSlowPath(_llint_throw_from_native_call)
    jmp _llint_throw_from_slow_path_trampoline
end