1 # Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions
6 # 1. Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer.
8 # 2. Redistributions in binary form must reproduce the above copyright
9 # notice, this list of conditions and the following disclaimer in the
10 # documentation and/or other materials provided with the distribution.
12 # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14 # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16 # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22 # THE POSSIBILITY OF SUCH DAMAGE.
26 macro jumpToInstruction()
30 macro dispatch(advance)
35 macro dispatchInt(advance)
40 macro dispatchIntIndirect(offset)
41 dispatchInt(offset * 8[PB, PC, 8])
44 macro dispatchAfterCall()
45 loadi ArgumentCount + TagOffset[cfr], PC
46 loadp CodeBlock[cfr], PB
47 loadp CodeBlock::m_instructions[PB], PB
48 loadisFromInstruction(1, t1)
49 storeq t0, [cfr, t1, 8]
50 valueProfile(t0, (CallOpCodeSize - 1), t2)
51 dispatch(CallOpCodeSize)
54 macro cCall2(function, arg1, arg2)
55 checkStackPointerAlignment(t4, 0xbad0c002)
61 # Note: this implementation is only correct if the return type size is > 8 bytes.
62 # See macro cCall2Void for an implementation when the return type <= 8 bytes.
63 # On Win64, when the return type is larger than 8 bytes, we need to allocate space on the stack for the return value.
64 # On entry rcx (t2), should contain a pointer to this stack space. The other parameters are shifted to the right,
65 # rdx (t1) should contain the first argument, and r8 (t6) should contain the second argument.
66 # On return, rax contains a pointer to this stack value, and we then need to copy the 16 byte return value into rax (t0) and rdx (t1)
67 # since the return value is expected to be split between the two.
68 # See http://msdn.microsoft.com/en-us/library/7572ztz4.aspx
83 cloopCallSlowPath function, arg1, arg2
89 macro cCall2Void(function, arg1, arg2)
91 cloopCallSlowPathVoid function, arg1, arg2
93 # Note: we cannot use the cCall2 macro for Win64 in this case,
94 # as the Win64 cCall2 implementation is only correct when the return type size is > 8 bytes.
95 # On Win64, rcx and rdx are used for passing the first two parameters.
96 # We also need to make room on the stack for all four parameter registers.
97 # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
104 cCall2(function, arg1, arg2)
108 # This barely works. arg3 and arg4 should probably be immediates.
109 macro cCall4(function, arg1, arg2, arg3, arg4)
110 checkStackPointerAlignment(t4, 0xbad0c004)
118 # On Win64, rcx, rdx, r8, and r9 are used for passing the first four parameters.
119 # We also need to make room on the stack for all four parameter registers.
120 # See http://msdn.microsoft.com/en-us/library/ms235286.aspx
141 macro doVMEntry(makeCall)
145 const protoCallFrame = t1
147 const previousCFR = t0
148 const previousPC = t6
155 const protoCallFrame = t6
157 const previousCFR = t0
158 const previousPC = t4
162 elsif ARM64 or C_LOOP
165 const protoCallFrame = a2
167 const previousCFR = t5
168 const previousPC = lr
177 vmEntryRecord(cfr, sp)
179 checkStackPointerAlignment(temp2, 0xbad0dc01)
181 storep vm, VMEntryRecord::m_vm[sp]
182 loadp VM::topCallFrame[vm], temp2
183 storep temp2, VMEntryRecord::m_prevTopCallFrame[sp]
184 loadp VM::topVMEntryFrame[vm], temp2
185 storep temp2, VMEntryRecord::m_prevTopVMEntryFrame[sp]
187 loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp2
188 addp CallFrameHeaderSlots, temp2, temp2
190 subp sp, temp2, temp1
192 # Ensure that we have enough additional stack capacity for the incoming args,
193 # and the frame for the JS code we're executing. We need to do this check
194 # before we start copying the args from the protoCallFrame below.
195 bpaeq temp1, VM::m_jsStackLimit[vm], .stackHeightOK
200 cloopCallSlowPath _llint_stack_check_at_vm_entry, vm, temp1
201 bpeq t0, 0, .stackCheckFailed
211 cCall2(_llint_throw_stack_overflow_error, vm, protoCallFrame)
213 vmEntryRecord(cfr, temp2)
215 loadp VMEntryRecord::m_vm[temp2], vm
216 loadp VMEntryRecord::m_prevTopCallFrame[temp2], temp3
217 storep temp3, VM::topCallFrame[vm]
218 loadp VMEntryRecord::m_prevTopVMEntryFrame[temp2], temp3
219 storep temp3, VM::topVMEntryFrame[vm]
221 subp cfr, CalleeRegisterSaveSize, sp
233 loadq [protoCallFrame, temp1, 8], temp3
234 storeq temp3, CodeBlock[sp, temp1, 8]
235 btinz temp1, .copyHeaderLoop
237 loadi PayloadOffset + ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], temp2
239 loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp3
242 bieq temp2, temp3, .copyArgs
243 move ValueUndefined, temp1
246 storeq temp1, ThisArgumentOffset + 8[sp, temp3, 8]
247 bineq temp2, temp3, .fillExtraArgsLoop
250 loadp ProtoCallFrame::args[protoCallFrame], temp1
253 btiz temp2, .copyArgsDone
255 loadq [temp1, temp2, 8], temp3
256 storeq temp3, ThisArgumentOffset + 8[sp, temp2, 8]
262 storep temp2, VM::topCallFrame[vm]
264 storep sp, VM::topCallFrame[vm]
266 storep cfr, VM::topVMEntryFrame[vm]
268 move 0xffff000000000000, csr1
271 checkStackPointerAlignment(temp3, 0xbad0dc02)
273 makeCall(entry, temp1)
275 checkStackPointerAlignment(temp3, 0xbad0dc03)
277 vmEntryRecord(cfr, temp2)
279 loadp VMEntryRecord::m_vm[temp2], vm
280 loadp VMEntryRecord::m_prevTopCallFrame[temp2], temp3
281 storep temp3, VM::topCallFrame[vm]
282 loadp VMEntryRecord::m_prevTopVMEntryFrame[temp2], temp3
283 storep temp3, VM::topVMEntryFrame[vm]
285 subp cfr, CalleeRegisterSaveSize, sp
294 macro makeJavaScriptCall(entry, temp)
297 cloopCallJSFunction entry
305 macro makeHostFunctionCall(entry, temp)
312 elsif ARM64 or C_LOOP
319 # We need to allocate 32 bytes on the stack for the shadow space.
329 _handleUncaughtException:
330 loadp Callee[cfr], t3
331 andp MarkedBlockMask, t3
332 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
333 loadp VM::callFrameForThrow[t3], cfr
335 loadp CallerFrame[cfr], cfr
336 vmEntryRecord(cfr, t2)
338 loadp VMEntryRecord::m_vm[t2], t3
339 loadp VMEntryRecord::m_prevTopCallFrame[t2], t5
340 storep t5, VM::topCallFrame[t3]
341 loadp VMEntryRecord::m_prevTopVMEntryFrame[t2], t5
342 storep t5, VM::topVMEntryFrame[t3]
344 subp cfr, CalleeRegisterSaveSize, sp
351 macro prepareStateForCCall()
356 macro restoreStateAfterCCall()
363 macro callSlowPath(slowPath)
364 prepareStateForCCall()
365 cCall2(slowPath, cfr, PC)
366 restoreStateAfterCCall()
369 macro traceOperand(fromWhere, operand)
370 prepareStateForCCall()
371 cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand)
372 restoreStateAfterCCall()
375 macro traceValue(fromWhere, operand)
376 prepareStateForCCall()
377 cCall4(_llint_trace_value, cfr, PC, fromWhere, operand)
378 restoreStateAfterCCall()
381 # Call a slow path for call opcodes.
382 macro callCallSlowPath(slowPath, action)
383 storei PC, ArgumentCount + TagOffset[cfr]
384 prepareStateForCCall()
385 cCall2(slowPath, cfr, PC)
389 macro callWatchdogTimerHandler(throwHandler)
390 storei PC, ArgumentCount + TagOffset[cfr]
391 prepareStateForCCall()
392 cCall2(_llint_slow_path_handle_watchdog_timer, cfr, PC)
393 btpnz t0, throwHandler
395 loadi ArgumentCount + TagOffset[cfr], PC
398 macro checkSwitchToJITForLoop()
402 storei PC, ArgumentCount + TagOffset[cfr]
403 prepareStateForCCall()
404 cCall2(_llint_loop_osr, cfr, PC)
410 loadi ArgumentCount + TagOffset[cfr], PC
414 macro loadVariable(operand, value)
415 loadisFromInstruction(operand, value)
416 loadq [cfr, value, 8], value
419 # Index and value must be different registers. Index may be clobbered.
420 macro loadConstantOrVariable(index, value)
421 bpgteq index, FirstConstantRegisterIndex, .constant
422 loadq [cfr, index, 8], value
425 loadp CodeBlock[cfr], value
426 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[value], value
427 subp FirstConstantRegisterIndex, index
428 loadq [value, index, 8], value
432 macro loadConstantOrVariableInt32(index, value, slow)
433 loadConstantOrVariable(index, value)
434 bqb value, tagTypeNumber, slow
437 macro loadConstantOrVariableCell(index, value, slow)
438 loadConstantOrVariable(index, value)
439 btqnz value, tagMask, slow
442 macro writeBarrierOnOperand(cellOperand)
444 loadisFromInstruction(cellOperand, t1)
445 loadConstantOrVariableCell(t1, t2, .writeBarrierDone)
446 skipIfIsRememberedOrInEden(t2, t1, t3,
448 btbnz gcData, .writeBarrierDone
450 cCall2Void(_llint_write_barrier_slow, cfr, t2)
458 macro writeBarrierOnOperands(cellOperand, valueOperand)
460 loadisFromInstruction(valueOperand, t1)
461 loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
462 btpz t0, .writeBarrierDone
464 writeBarrierOnOperand(cellOperand)
469 macro writeBarrierOnGlobalObject(valueOperand)
471 loadisFromInstruction(valueOperand, t1)
472 loadConstantOrVariableCell(t1, t0, .writeBarrierDone)
473 btpz t0, .writeBarrierDone
475 loadp CodeBlock[cfr], t3
476 loadp CodeBlock::m_globalObject[t3], t3
477 skipIfIsRememberedOrInEden(t3, t1, t2,
479 btbnz gcData, .writeBarrierDone
481 cCall2Void(_llint_write_barrier_slow, cfr, t3)
489 macro valueProfile(value, operand, scratch)
490 loadpFromInstruction(operand, scratch)
491 storeq value, ValueProfile::m_buckets[scratch]
494 macro loadStructure(cell, structure)
497 macro loadStructureWithScratch(cell, structure, scratch)
498 loadp CodeBlock[cfr], scratch
499 loadp CodeBlock::m_vm[scratch], scratch
500 loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[scratch], scratch
501 loadi JSCell::m_structureID[cell], structure
502 loadp [scratch, structure, 8], structure
505 macro loadStructureAndClobberFirstArg(cell, structure)
506 loadi JSCell::m_structureID[cell], structure
507 loadp CodeBlock[cfr], cell
508 loadp CodeBlock::m_vm[cell], cell
509 loadp VM::heap + Heap::m_structureIDTable + StructureIDTable::m_table[cell], cell
510 loadp [cell, structure, 8], structure
513 macro storeStructureWithTypeInfo(cell, structure, scratch)
514 loadq Structure::m_blob + StructureIDBlob::u.doubleWord[structure], scratch
515 storeq scratch, JSCell::m_structureID[cell]
518 # Entrypoints into the interpreter.
520 # Expects that CodeBlock is in t1, which is what prologue() leaves behind.
521 macro functionArityCheck(doneLabel, slowPath)
522 loadi PayloadOffset + ArgumentCount[cfr], t0
523 biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
524 prepareStateForCCall()
525 cCall2(slowPath, cfr, PC) # This slowPath has the protocol: t0 = 0 => no error, t0 != 0 => error
527 move t1, cfr # t1 contains caller frame
528 jmp _llint_throw_from_slow_path_trampoline
531 # t1 points to ArityCheckData.
532 loadp CommonSlowPaths::ArityCheckData::thunkToCall[t1], t2
533 btpz t2, .proceedInline
535 loadp CommonSlowPaths::ArityCheckData::returnPC[t1], t7
536 loadp CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t0
539 loadp ReturnPC[cfr], t0
545 loadi CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t1
548 // Move frame up "t1 * 2" slots
552 loadi PayloadOffset + ArgumentCount[cfr], t2
553 addi CallFrameHeaderSlots, t2
556 storeq t0, [t3, t1, 8]
558 bsubinz 1, t2, .copyLoop
560 // Fill new slots with JSUndefined
562 move ValueUndefined, t0
564 storeq t0, [t3, t1, 8]
566 baddinz 1, t2, .fillLoop
573 # Reload CodeBlock and reset PC, since the slow_path clobbered them.
574 loadp CodeBlock[cfr], t1
575 loadp CodeBlock::m_instructions[t1], PB
580 macro branchIfException(label)
581 loadp Callee[cfr], t3
582 andp MarkedBlockMask, t3
583 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
584 btqz VM::m_exception[t3], .noException
590 # Instruction implementations
594 checkStackPointerAlignment(t2, 0xdead00e1)
595 loadp CodeBlock[cfr], t2 // t2<CodeBlock> = cfr.CodeBlock
596 loadi CodeBlock::m_numVars[t2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars
597 btiz t2, .opEnterDone
598 move ValueUndefined, t0
602 storeq t0, [cfr, t2, 8]
604 btqnz t2, .opEnterLoop
606 callSlowPath(_slow_path_enter)
610 _llint_op_create_lexical_environment:
612 callSlowPath(_llint_slow_path_create_lexical_environment)
618 loadp Callee[cfr], t0
619 loadp JSCallee::m_scope[t0], t0
620 loadisFromInstruction(1, t1)
621 storeq t0, [cfr, t1, 8]
625 _llint_op_create_this:
627 loadisFromInstruction(2, t0)
628 loadp [cfr, t0, 8], t0
629 loadp JSFunction::m_rareData[t0], t4
630 btpz t4, .opCreateThisSlow
631 loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_allocator[t4], t1
632 loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_structure[t4], t2
633 btpz t1, .opCreateThisSlow
634 loadpFromInstruction(4, t4)
635 bpeq t4, 1, .hasSeenMultipleCallee
636 bpneq t4, t0, .opCreateThisSlow
637 .hasSeenMultipleCallee:
638 allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow)
639 loadisFromInstruction(1, t1)
640 storeq t0, [cfr, t1, 8]
644 callSlowPath(_slow_path_create_this)
650 loadisFromInstruction(1, t0)
651 loadq [cfr, t0, 8], t0
652 btqnz t0, tagMask, .opToThisSlow
653 bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
654 loadStructureWithScratch(t0, t1, t2)
655 loadpFromInstruction(2, t2)
656 bpneq t1, t2, .opToThisSlow
660 callSlowPath(_slow_path_to_this)
664 _llint_op_new_object:
666 loadpFromInstruction(3, t0)
667 loadp ObjectAllocationProfile::m_allocator[t0], t1
668 loadp ObjectAllocationProfile::m_structure[t0], t2
669 allocateJSObject(t1, t2, t0, t3, .opNewObjectSlow)
670 loadisFromInstruction(1, t1)
671 storeq t0, [cfr, t1, 8]
675 callSlowPath(_llint_slow_path_new_object)
681 loadpFromInstruction(1, t0)
682 loadq [cfr, t0, 8], t0
683 bqneq t0, ValueEmpty, .opNotTDZ
684 callSlowPath(_slow_path_throw_tdz_error)
692 loadisFromInstruction(2, t1)
693 loadisFromInstruction(1, t0)
694 loadConstantOrVariable(t1, t2)
695 storeq t2, [cfr, t0, 8]
701 loadisFromInstruction(2, t0)
702 loadisFromInstruction(1, t1)
703 loadConstantOrVariable(t0, t2)
705 btqnz t2, ~1, .opNotSlow
707 storeq t2, [cfr, t1, 8]
711 callSlowPath(_slow_path_not)
715 macro equalityComparison(integerComparison, slowPath)
717 loadisFromInstruction(3, t0)
718 loadisFromInstruction(2, t2)
719 loadisFromInstruction(1, t3)
720 loadConstantOrVariableInt32(t0, t1, .slow)
721 loadConstantOrVariableInt32(t2, t0, .slow)
722 integerComparison(t0, t1, t0)
724 storeq t0, [cfr, t3, 8]
728 callSlowPath(slowPath)
734 macro (left, right, result) cieq left, right, result end,
740 macro (left, right, result) cineq left, right, result end,
744 macro equalNullComparison()
745 loadisFromInstruction(2, t0)
746 loadq [cfr, t0, 8], t0
747 btqnz t0, tagMask, .immediate
748 btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
751 .masqueradesAsUndefined:
752 loadStructureWithScratch(t0, t2, t1)
753 loadp CodeBlock[cfr], t0
754 loadp CodeBlock::m_globalObject[t0], t0
755 cpeq Structure::m_globalObject[t2], t0, t0
758 andq ~TagBitUndefined, t0
759 cqeq t0, ValueNull, t0
765 equalNullComparison()
766 loadisFromInstruction(1, t1)
768 storeq t0, [cfr, t1, 8]
774 equalNullComparison()
775 loadisFromInstruction(1, t1)
777 storeq t0, [cfr, t1, 8]
781 macro strictEq(equalityOperation, slowPath)
783 loadisFromInstruction(3, t0)
784 loadisFromInstruction(2, t2)
785 loadConstantOrVariable(t0, t1)
786 loadConstantOrVariable(t2, t0)
789 btqz t2, tagMask, .slow
790 bqaeq t0, tagTypeNumber, .leftOK
791 btqnz t0, tagTypeNumber, .slow
793 bqaeq t1, tagTypeNumber, .rightOK
794 btqnz t1, tagTypeNumber, .slow
796 equalityOperation(t0, t1, t0)
797 loadisFromInstruction(1, t1)
799 storeq t0, [cfr, t1, 8]
803 callSlowPath(slowPath)
809 macro (left, right, result) cqeq left, right, result end,
815 macro (left, right, result) cqneq left, right, result end,
816 _slow_path_nstricteq)
819 macro preOp(arithmeticOperation, slowPath)
821 loadisFromInstruction(1, t0)
822 loadq [cfr, t0, 8], t1
823 bqb t1, tagTypeNumber, .slow
824 arithmeticOperation(t1, .slow)
825 orq tagTypeNumber, t1
826 storeq t1, [cfr, t0, 8]
830 callSlowPath(slowPath)
836 macro (value, slow) baddio 1, value, slow end,
842 macro (value, slow) bsubio 1, value, slow end,
848 loadisFromInstruction(2, t0)
849 loadisFromInstruction(1, t1)
850 loadConstantOrVariable(t0, t2)
851 bqaeq t2, tagTypeNumber, .opToNumberIsImmediate
852 btqz t2, tagTypeNumber, .opToNumberSlow
853 .opToNumberIsImmediate:
854 storeq t2, [cfr, t1, 8]
858 callSlowPath(_slow_path_to_number)
864 loadisFromInstruction(2, t1)
865 loadisFromInstruction(1, t2)
866 loadConstantOrVariable(t1, t0)
867 btqnz t0, tagMask, .opToStringSlow
868 bbneq JSCell::m_type[t0], StringType, .opToStringSlow
870 storeq t0, [cfr, t2, 8]
874 callSlowPath(_slow_path_to_string)
880 loadisFromInstruction(2, t0)
881 loadisFromInstruction(1, t1)
882 loadConstantOrVariable(t0, t2)
883 bqb t2, tagTypeNumber, .opNegateNotInt
884 btiz t2, 0x7fffffff, .opNegateSlow
886 orq tagTypeNumber, t2
887 storeq t2, [cfr, t1, 8]
890 btqz t2, tagTypeNumber, .opNegateSlow
891 xorq 0x8000000000000000, t2
892 storeq t2, [cfr, t1, 8]
896 callSlowPath(_slow_path_negate)
900 macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
901 loadisFromInstruction(3, t0)
902 loadisFromInstruction(2, t2)
903 loadConstantOrVariable(t0, t1)
904 loadConstantOrVariable(t2, t0)
905 bqb t0, tagTypeNumber, .op1NotInt
906 bqb t1, tagTypeNumber, .op2NotInt
907 loadisFromInstruction(1, t2)
908 integerOperationAndStore(t1, t0, .slow, t2)
912 # First operand is definitely not an int, the second operand could be anything.
913 btqz t0, tagTypeNumber, .slow
914 bqaeq t1, tagTypeNumber, .op1NotIntOp2Int
915 btqz t1, tagTypeNumber, .slow
916 addq tagTypeNumber, t1
922 loadisFromInstruction(1, t2)
923 addq tagTypeNumber, t0
925 doubleOperation(ft1, ft0)
927 subq tagTypeNumber, t0
928 storeq t0, [cfr, t2, 8]
932 # First operand is definitely an int, the second is definitely not.
933 loadisFromInstruction(1, t2)
934 btqz t1, tagTypeNumber, .slow
936 addq tagTypeNumber, t1
938 doubleOperation(ft1, ft0)
940 subq tagTypeNumber, t0
941 storeq t0, [cfr, t2, 8]
945 callSlowPath(slowPath)
949 macro binaryOp(integerOperation, doubleOperation, slowPath)
951 macro (left, right, slow, index)
952 integerOperation(left, right, slow)
953 orq tagTypeNumber, right
954 storeq right, [cfr, index, 8]
956 doubleOperation, slowPath)
962 macro (left, right, slow) baddio left, right, slow end,
963 macro (left, right) addd left, right end,
970 macro (left, right, slow, index)
971 # Assume t3 is scratchable.
973 bmulio left, t3, slow
978 orq tagTypeNumber, t3
979 storeq t3, [cfr, index, 8]
981 macro (left, right) muld left, right end,
988 macro (left, right, slow) bsubio left, right, slow end,
989 macro (left, right) subd left, right end,
995 if X86_64 or X86_64_WIN
997 macro (left, right, slow, index)
998 # Assume t3 is scratchable.
1000 bineq left, -1, .notNeg2TwoThe31DivByNeg1
1001 bieq right, -2147483648, .slow
1002 .notNeg2TwoThe31DivByNeg1:
1011 orq tagTypeNumber, t0
1012 storeq t0, [cfr, index, 8]
1014 macro (left, right) divd left, right end,
1017 callSlowPath(_slow_path_div)
1022 macro bitOp(operation, slowPath, advance)
1023 loadisFromInstruction(3, t0)
1024 loadisFromInstruction(2, t2)
1025 loadisFromInstruction(1, t3)
1026 loadConstantOrVariable(t0, t1)
1027 loadConstantOrVariable(t2, t0)
1028 bqb t0, tagTypeNumber, .slow
1029 bqb t1, tagTypeNumber, .slow
1031 orq tagTypeNumber, t0
1032 storeq t0, [cfr, t3, 8]
1036 callSlowPath(slowPath)
1043 macro (left, right) lshifti left, right end,
1051 macro (left, right) rshifti left, right end,
1059 macro (left, right) urshifti left, right end,
1066 loadisFromInstruction(1, t0)
1067 loadisFromInstruction(2, t1)
1068 loadConstantOrVariable(t1, t2)
1069 bilt t2, 0, .opUnsignedSlow
1070 storeq t2, [cfr, t0, 8]
1073 callSlowPath(_slow_path_unsigned)
1080 macro (left, right) andi left, right end,
1088 macro (left, right) xori left, right end,
1096 macro (left, right) ori left, right end,
1101 _llint_op_check_has_instance:
1103 loadisFromInstruction(3, t1)
1104 loadConstantOrVariableCell(t1, t0, .opCheckHasInstanceSlow)
1105 btbz JSCell::m_flags[t0], ImplementsDefaultHasInstance, .opCheckHasInstanceSlow
1108 .opCheckHasInstanceSlow:
1109 callSlowPath(_llint_slow_path_check_has_instance)
1113 _llint_op_instanceof:
1115 # Actually do the work.
1116 loadisFromInstruction(3, t0)
1117 loadConstantOrVariableCell(t0, t1, .opInstanceofSlow)
1118 bbb JSCell::m_type[t1], ObjectType, .opInstanceofSlow
1119 loadisFromInstruction(2, t0)
1120 loadConstantOrVariableCell(t0, t2, .opInstanceofSlow)
1122 # Register state: t1 = prototype, t2 = value
1125 loadStructureAndClobberFirstArg(t2, t3)
1126 loadq Structure::m_prototype[t3], t2
1127 bqeq t2, t1, .opInstanceofDone
1128 btqz t2, tagMask, .opInstanceofLoop
1133 loadisFromInstruction(1, t3)
1134 storeq t0, [cfr, t3, 8]
1138 callSlowPath(_llint_slow_path_instanceof)
1142 _llint_op_is_undefined:
1144 loadisFromInstruction(2, t1)
1145 loadisFromInstruction(1, t2)
1146 loadConstantOrVariable(t1, t0)
1147 btqz t0, tagMask, .opIsUndefinedCell
1148 cqeq t0, ValueUndefined, t3
1150 storeq t3, [cfr, t2, 8]
1153 btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .masqueradesAsUndefined
1155 storeq t1, [cfr, t2, 8]
1157 .masqueradesAsUndefined:
1158 loadStructureWithScratch(t0, t3, t1)
1159 loadp CodeBlock[cfr], t1
1160 loadp CodeBlock::m_globalObject[t1], t1
1161 cpeq Structure::m_globalObject[t3], t1, t0
1163 storeq t0, [cfr, t2, 8]
1167 _llint_op_is_boolean:
1169 loadisFromInstruction(2, t1)
1170 loadisFromInstruction(1, t2)
1171 loadConstantOrVariable(t1, t0)
1175 storeq t0, [cfr, t2, 8]
1179 _llint_op_is_number:
1181 loadisFromInstruction(2, t1)
1182 loadisFromInstruction(1, t2)
1183 loadConstantOrVariable(t1, t0)
1184 tqnz t0, tagTypeNumber, t1
1186 storeq t1, [cfr, t2, 8]
1190 _llint_op_is_string:
1192 loadisFromInstruction(2, t1)
1193 loadisFromInstruction(1, t2)
1194 loadConstantOrVariable(t1, t0)
1195 btqnz t0, tagMask, .opIsStringNotCell
1196 cbeq JSCell::m_type[t0], StringType, t1
1198 storeq t1, [cfr, t2, 8]
1201 storeq ValueFalse, [cfr, t2, 8]
1205 _llint_op_is_object:
1207 loadisFromInstruction(2, t1)
1208 loadisFromInstruction(1, t2)
1209 loadConstantOrVariable(t1, t0)
1210 btqnz t0, tagMask, .opIsObjectNotCell
1211 cbaeq JSCell::m_type[t0], ObjectType, t1
1213 storeq t1, [cfr, t2, 8]
1216 storeq ValueFalse, [cfr, t2, 8]
1220 macro loadPropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1221 bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1222 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1223 negi propertyOffsetAsInt
1224 sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1227 addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1229 loadq (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8], value
1233 macro storePropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, value)
1234 bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1235 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1236 negi propertyOffsetAsInt
1237 sxi2q propertyOffsetAsInt, propertyOffsetAsInt
1240 addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1242 storeq value, (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1245 _llint_op_init_global_const:
1247 writeBarrierOnGlobalObject(2)
1248 loadisFromInstruction(2, t1)
1249 loadpFromInstruction(1, t0)
1250 loadConstantOrVariable(t1, t2)
1255 macro getById(getPropertyStorage)
1257 # We only do monomorphic get_by_id caching for now, and we do not modify the
1258 # opcode. We do, however, allow for the cache to change anytime if fails, since
1259 # ping-ponging is free. At best we get lucky and the get_by_id will continue
1260 # to take fast path on the new cache. At worst we take slow path, which is what
1261 # we would have been doing anyway.
1262 loadisFromInstruction(2, t0)
1263 loadConstantOrVariableCell(t0, t3, .opGetByIdSlow)
1264 loadStructureWithScratch(t3, t2, t1)
1265 loadpFromInstruction(4, t1)
1266 bpneq t2, t1, .opGetByIdSlow
1270 macro (propertyStorage, scratch)
1271 loadisFromInstruction(5, t2)
1272 loadisFromInstruction(1, t1)
1273 loadq [propertyStorage, t2], scratch
1274 storeq scratch, [cfr, t1, 8]
1275 valueProfile(scratch, 8, t1)
1280 callSlowPath(_llint_slow_path_get_by_id)
1284 _llint_op_get_by_id:
1285 getById(withInlineStorage)
1288 _llint_op_get_by_id_out_of_line:
1289 getById(withOutOfLineStorage)
1292 _llint_op_get_array_length:
1294 loadisFromInstruction(2, t0)
1295 loadpFromInstruction(4, t1)
1296 loadConstantOrVariableCell(t0, t3, .opGetArrayLengthSlow)
1298 arrayProfile(t2, t1, t0)
1299 btiz t2, IsArray, .opGetArrayLengthSlow
1300 btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
1301 loadisFromInstruction(1, t1)
1302 loadp JSObject::m_butterfly[t3], t0
1303 loadi -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], t0
1304 bilt t0, 0, .opGetArrayLengthSlow
1305 orq tagTypeNumber, t0
1306 valueProfile(t0, 8, t2)
1307 storeq t0, [cfr, t1, 8]
1310 .opGetArrayLengthSlow:
1311 callSlowPath(_llint_slow_path_get_by_id)
1315 macro putById(getPropertyStorage)
1317 writeBarrierOnOperands(1, 3)
1318 loadisFromInstruction(1, t3)
1319 loadConstantOrVariableCell(t3, t0, .opPutByIdSlow)
1320 loadStructureWithScratch(t0, t2, t1)
1321 loadpFromInstruction(4, t1)
1322 bpneq t2, t1, .opPutByIdSlow
1326 macro (propertyStorage, scratch)
1327 loadisFromInstruction(5, t1)
1328 loadisFromInstruction(3, t2)
1329 loadConstantOrVariable(t2, scratch)
1330 storeq scratch, [propertyStorage, t1]
1335 _llint_op_put_by_id:
1336 putById(withInlineStorage)
1339 callSlowPath(_llint_slow_path_put_by_id)
1343 _llint_op_put_by_id_out_of_line:
1344 putById(withOutOfLineStorage)
1347 macro putByIdTransition(additionalChecks, getPropertyStorage)
1349 writeBarrierOnOperand(1)
1350 loadisFromInstruction(1, t3)
1351 loadpFromInstruction(4, t1)
1352 loadConstantOrVariableCell(t3, t0, .opPutByIdSlow)
1353 loadStructureWithScratch(t0, t2, t3)
1354 bpneq t2, t1, .opPutByIdSlow
1355 additionalChecks(t1, t3, t2)
1356 loadisFromInstruction(3, t2)
1357 loadisFromInstruction(5, t1)
1361 macro (propertyStorage, scratch)
1362 addp t1, propertyStorage, t3
1363 loadConstantOrVariable(t2, t1)
1365 loadpFromInstruction(6, t1)
1366 loadi Structure::m_blob + StructureIDBlob::u.words.word1[t1], t1
1367 storei t1, JSCell::m_structureID[t0]
1372 macro noAdditionalChecks(oldStructure, scratch, scratch2)
1375 macro structureChainChecks(oldStructure, scratch, scratch2)
1376 const protoCell = oldStructure # Reusing the oldStructure register for the proto
1377 loadpFromInstruction(7, scratch)
1378 assert(macro (ok) btpnz scratch, ok end)
1379 loadp StructureChain::m_vector[scratch], scratch
1380 assert(macro (ok) btpnz scratch, ok end)
1381 bqeq Structure::m_prototype[oldStructure], ValueNull, .done
1383 loadq Structure::m_prototype[oldStructure], protoCell
1384 loadStructureAndClobberFirstArg(protoCell, scratch2)
1385 move scratch2, oldStructure
1386 bpneq oldStructure, [scratch], .opPutByIdSlow
1388 bqneq Structure::m_prototype[oldStructure], ValueNull, .loop
1392 _llint_op_put_by_id_transition_direct:
1393 putByIdTransition(noAdditionalChecks, withInlineStorage)
1396 _llint_op_put_by_id_transition_direct_out_of_line:
1397 putByIdTransition(noAdditionalChecks, withOutOfLineStorage)
1400 _llint_op_put_by_id_transition_normal:
1401 putByIdTransition(structureChainChecks, withInlineStorage)
1404 _llint_op_put_by_id_transition_normal_out_of_line:
1405 putByIdTransition(structureChainChecks, withOutOfLineStorage)
1408 _llint_op_get_by_val:
1410 loadisFromInstruction(2, t2)
1411 loadConstantOrVariableCell(t2, t0, .opGetByValSlow)
1412 loadpFromInstruction(4, t3)
1414 arrayProfile(t2, t3, t1)
1415 loadisFromInstruction(3, t3)
1416 loadConstantOrVariableInt32(t3, t1, .opGetByValSlow)
1418 loadp JSObject::m_butterfly[t0], t3
1419 andi IndexingShapeMask, t2
1420 bieq t2, Int32Shape, .opGetByValIsContiguous
1421 bineq t2, ContiguousShape, .opGetByValNotContiguous
1422 .opGetByValIsContiguous:
1424 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1425 loadisFromInstruction(1, t0)
1426 loadq [t3, t1, 8], t2
1427 btqz t2, .opGetByValOutOfBounds
1430 .opGetByValNotContiguous:
1431 bineq t2, DoubleShape, .opGetByValNotDouble
1432 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1433 loadis 8[PB, PC, 8], t0
1434 loadd [t3, t1, 8], ft0
1435 bdnequn ft0, ft0, .opGetByValOutOfBounds
1437 subq tagTypeNumber, t2
1440 .opGetByValNotDouble:
1441 subi ArrayStorageShape, t2
1442 bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValSlow
1443 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t3], .opGetByValOutOfBounds
1444 loadisFromInstruction(1, t0)
1445 loadq ArrayStorage::m_vector[t3, t1, 8], t2
1446 btqz t2, .opGetByValOutOfBounds
1449 storeq t2, [cfr, t0, 8]
1450 valueProfile(t2, 5, t0)
1453 .opGetByValOutOfBounds:
1454 loadpFromInstruction(4, t0)
1455 storeb 1, ArrayProfile::m_outOfBounds[t0]
1457 callSlowPath(_llint_slow_path_get_by_val)
1461 macro contiguousPutByVal(storeCallback)
1462 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
1464 loadisFromInstruction(3, t2)
1465 storeCallback(t2, t1, [t0, t3, 8])
1469 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1470 loadp 32[PB, PC, 8], t2
1471 storeb 1, ArrayProfile::m_mayStoreToHole[t2]
1473 storei t2, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
# Store a value into an array-like object, specializing on the butterfly's
# indexing shape (Int32, Double, Contiguous, ArrayStorage). Any shape
# mismatch, type-check failure, or hole/out-of-bounds condition bails to the
# given slow path (after updating the ArrayProfile where appropriate).
# NOTE(review): this excerpt is missing interleaved lines (e.g. the macro's
# `end` and some loads/dispatches), so the annotations below only describe
# the visible instructions.
1477 macro putByVal(slowPath)
1479 writeBarrierOnOperands(1, 3)
1480 loadisFromInstruction(1, t0)
# t1 := the base, which must be a cell; non-cell bases go to the slow path.
1481 loadConstantOrVariableCell(t0, t1, .opPutByValSlow)
1482 loadpFromInstruction(4, t3)
1484 arrayProfile(t2, t3, t0)
1485 loadisFromInstruction(2, t0)
# t3 := the subscript as an int32 index; non-int32 subscripts go slow.
1486 loadConstantOrVariableInt32(t0, t3, .opPutByValSlow)
# t0 := the butterfly (indexed storage); t2 holds indexing-type bits.
1488 loadp JSObject::m_butterfly[t1], t0
1489 andi IndexingShapeMask, t2
1490 bineq t2, Int32Shape, .opPutByValNotInt32
# Int32 shape: the stored value must itself be a boxed int32.
1492 macro (operand, scratch, address)
1493 loadConstantOrVariable(operand, scratch)
1494 bpb scratch, tagTypeNumber, .opPutByValSlow
1495 storep scratch, address
1498 .opPutByValNotInt32:
1499 bineq t2, DoubleShape, .opPutByValNotDouble
# Double shape: store the unboxed IEEE double (unboxing code for the int
# case is in lines not visible in this excerpt).
1501 macro (operand, scratch, address)
1502 loadConstantOrVariable(operand, scratch)
1503 bqb scratch, tagTypeNumber, .notInt
# Re-add the numeric tag offset to recover the raw double bits.
1507 addp tagTypeNumber, scratch
# x != x is true only for NaN: impure NaNs must not be stored unboxed.
1509 bdnequn ft0, ft0, .opPutByValSlow
1514 .opPutByValNotDouble:
1515 bineq t2, ContiguousShape, .opPutByValNotContiguous
# Contiguous shape: any JSValue may be stored verbatim.
1517 macro (operand, scratch, address)
1518 loadConstantOrVariable(operand, scratch)
1519 storep scratch, address
1522 .opPutByValNotContiguous:
1523 bineq t2, ArrayStorageShape, .opPutByValSlow
1524 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
# A zero (empty) vector slot means we are filling a hole.
1525 btqz ArrayStorage::m_vector[t0, t3, 8], .opPutByValArrayStorageEmpty
1526 .opPutByValArrayStorageStoreResult:
1527 loadisFromInstruction(3, t2)
1528 loadConstantOrVariable(t2, t1)
1529 storeq t1, ArrayStorage::m_vector[t0, t3, 8]
# Hole store: record it in the profile, bump the value count, and grow
# publicLength if we stored past the current length.
1532 .opPutByValArrayStorageEmpty:
1533 loadpFromInstruction(4, t1)
1534 storeb 1, ArrayProfile::m_mayStoreToHole[t1]
1535 addi 1, ArrayStorage::m_numValuesInVector[t0]
1536 bib t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .opPutByValArrayStorageStoreResult
1538 storei t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1539 jmp .opPutByValArrayStorageStoreResult
# Out-of-bounds store: flag the profile so the JITs stop speculating, then
# fall through to the slow path.
1541 .opPutByValOutOfBounds:
1542 loadpFromInstruction(4, t0)
1543 storeb 1, ArrayProfile::m_outOfBounds[t0]
1545 callSlowPath(slowPath)
# op_put_by_val: generic indexed store; its slow path respects the
# prototype chain / setters.
1549 _llint_op_put_by_val:
1550 putByVal(_llint_slow_path_put_by_val)

# op_put_by_val_direct: defines the property directly on the base object
# (used e.g. for literals); only the slow-path callee differs.
1552 _llint_op_put_by_val_direct:
1553 putByVal(_llint_slow_path_put_by_val_direct)
# Tail of a jump-style opcode whose label is not visible in this excerpt:
# advance PC by the offset stored in instruction operand 1.
1558 dispatchIntIndirect(1)
# Shared body for the conditional-jump ops (jtrue/jfalse): load operand 1
# and let conditionOp decide whether to branch to .target, which jumps by
# the offset in operand 2. The slow path for non-boolean inputs is in lines
# not visible in this excerpt.
1561 macro jumpTrueOrFalse(conditionOp, slow)
1562 loadisFromInstruction(1, t1)
1563 loadConstantOrVariable(t1, t0)
1566 conditionOp(t0, .target)
1570 dispatchIntIndirect(2)
# Shared body for the null-comparison jump ops: operand 1 is asserted to be
# a variable (never a constant). Cells are classified via cellHandler
# (which can observe MasqueradesAsUndefined through the structure);
# immediates go through immediateHandler after folding undefined into null.
1578 macro equalNull(cellHandler, immediateHandler)
1579 loadisFromInstruction(1, t0)
1580 assertNotConstant(t0)
1581 loadq [cfr, t0, 8], t0
# Non-cell (immediate) values take the .immediate path below.
1582 btqnz t0, tagMask, .immediate
1583 loadStructureWithScratch(t0, t2, t1)
1584 cellHandler(t2, JSCell::m_flags[t0], .target)
# .target path (label not visible here): jump by the offset in operand 2.
1588 dispatchIntIndirect(2)
# Collapse undefined into null before testing (undefined compares equal to
# null for these ops).
1591 andq ~TagBitUndefined, t0
1592 immediateHandler(t0, .target)
# Handler macros passed to equalNull by op_jeq_null/op_eq_null (the invoking
# label is outside this excerpt). A cell compares equal to null only if it
# masquerades-as-undefined AND belongs to the current global object.
1599 macro (structure, value, target)
1600 btbz value, MasqueradesAsUndefined, .notMasqueradesAsUndefined
1601 loadp CodeBlock[cfr], t0
1602 loadp CodeBlock::m_globalObject[t0], t0
1603 bpeq Structure::m_globalObject[structure], t0, target
1604 .notMasqueradesAsUndefined:
# Immediates: equal when the undefined-masked value is exactly null.
1606 macro (value, target) bqeq value, ValueNull, target end)
# op_jneq_null: mirror image of the eq-null case above — branch to the
# target when the operand is NOT null/undefined-like.
1609 _llint_op_jneq_null:
1612 macro (structure, value, target)
# A cell that does not masquerade-as-undefined is never null: branch.
1613 btbz value, MasqueradesAsUndefined, target
1614 loadp CodeBlock[cfr], t0
1615 loadp CodeBlock::m_globalObject[t0], t0
# A masquerading cell from a *different* global object still counts as
# not-null here.
1616 bpneq Structure::m_globalObject[structure], t0, target
1618 macro (value, target) bqneq value, ValueNull, target end)
# Body of op_jneq_ptr (its label is outside this excerpt): compare a
# variable (operand 1) against one of the global object's special pointers
# (indexed by operand 2) and branch when they differ.
1623 loadisFromInstruction(1, t0)
1624 loadisFromInstruction(2, t1)
1625 loadp CodeBlock[cfr], t2
1626 loadp CodeBlock::m_globalObject[t2], t2
1627 loadp JSGlobalObject::m_specialPointers[t2, t1, 8], t1
1628 bpneq t1, [cfr, t0, 8], .opJneqPtrTarget
# Taken-branch tail (the .opJneqPtrTarget label and the fall-through
# dispatch are not visible here): jump by the offset in operand 3.
1632 dispatchIntIndirect(3)
# Shared body for the fused compare-and-jump ops (jless, jlesseq, …):
# fast paths for int32/int32 and double/double in any combination, with
# anything non-numeric deferred to slowPath. Operand 3 is the jump offset.
1635 macro compare(integerCompare, doubleCompare, slowPath)
1636 loadisFromInstruction(1, t2)
1637 loadisFromInstruction(2, t3)
1638 loadConstantOrVariable(t2, t0)
1639 loadConstantOrVariable(t3, t1)
# NaN-boxing: values >= tagTypeNumber are boxed int32s; below-tag values
# are boxed doubles or non-numbers.
1640 bqb t0, tagTypeNumber, .op1NotInt
1641 bqb t1, tagTypeNumber, .op2NotInt
1642 integerCompare(t0, t1, .jumpTarget)
# op1 is not an int: it must at least carry numeric tag bits (a boxed
# double), otherwise take the slow path.
1646 btqz t0, tagTypeNumber, .slow
1647 bqb t1, tagTypeNumber, .op1NotIntOp2NotInt
1650 .op1NotIntOp2NotInt:
1651 btqz t1, tagTypeNumber, .slow
# Re-add the tag offset to recover raw IEEE bits before unboxing (the fd/
# int-conversion instructions fall in lines not visible in this excerpt).
1652 addq tagTypeNumber, t1
1655 addq tagTypeNumber, t0
1657 doubleCompare(ft0, ft1, .jumpTarget)
1662 btqz t1, tagTypeNumber, .slow
1663 addq tagTypeNumber, t1
1665 doubleCompare(ft0, ft1, .jumpTarget)
# Taken-branch tail: jump by the offset in operand 3.
1669 dispatchIntIndirect(3)
1672 callSlowPath(slowPath)
# op_switch_imm: dense integer switch through a SimpleJumpTable stored in
# CodeBlock::RareData. Non-int32 scrutinees fall through (doubles go to the
# slow path so they can be truncated/compared there); a zero branch offset
# in the table means "no case" and also falls through to the default.
1677 _llint_op_switch_imm:
1679 loadisFromInstruction(3, t2)
1680 loadisFromInstruction(1, t3)
1681 loadConstantOrVariable(t2, t1)
1682 loadp CodeBlock[cfr], t2
1683 loadp CodeBlock::m_rareData[t2], t2
1684 muli sizeof SimpleJumpTable, t3 # FIXME: would be nice to peephole this!
1685 loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1687 bqb t1, tagTypeNumber, .opSwitchImmNotInt
# Index branchOffsets by (value - min); out-of-range indices hit the
# default case.
1688 subi SimpleJumpTable::min[t2], t1
1689 biaeq t1, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
1690 loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
1691 loadis [t3, t1, 4], t1
1692 btiz t1, .opSwitchImmFallThrough
# Non-int path (its label is not visible here): only doubles go slow.
1696 btqnz t1, tagTypeNumber, .opSwitchImmSlow # Go slow if it's a double.
1697 .opSwitchImmFallThrough:
1698 dispatchIntIndirect(2)
1701 callSlowPath(_llint_slow_path_switch_imm)
# op_switch_char: switch on a single-character string via a SimpleJumpTable.
# Non-strings, strings whose length != 1, and table misses all fall through
# to the default; unresolved ropes take the slow path to be flattened first.
1705 _llint_op_switch_char:
1707 loadisFromInstruction(3, t2)
1708 loadisFromInstruction(1, t3)
1709 loadConstantOrVariable(t2, t1)
1710 loadp CodeBlock[cfr], t2
1711 loadp CodeBlock::m_rareData[t2], t2
1712 muli sizeof SimpleJumpTable, t3
1713 loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1715 btqnz t1, tagMask, .opSwitchCharFallThrough
1716 bbneq JSCell::m_type[t1], StringType, .opSwitchCharFallThrough
1717 bineq JSString::m_length[t1], 1, .opSwitchCharFallThrough
1718 loadp JSString::m_value[t1], t0
# A null StringImpl means the JSString is an unresolved rope.
1719 btpz t0, .opSwitchOnRope
1720 loadp StringImpl::m_data8[t0], t1
# Choose the 8-bit vs 16-bit character load based on the impl's flags.
1721 btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
1723 jmp .opSwitchCharReady
# Table lookup with the loaded character in t0 (the 8-bit/16-bit load
# labels fall in lines not visible in this excerpt).
1727 subi SimpleJumpTable::min[t2], t0
1728 biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
1729 loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
1730 loadis [t2, t0, 4], t1
1731 btiz t1, .opSwitchCharFallThrough
1734 .opSwitchCharFallThrough:
1735 dispatchIntIndirect(2)
1738 callSlowPath(_llint_slow_path_switch_char)
# Record the structure of the call's 'this' argument into the call
# instruction's ArrayProfile (skipped when 'this' is not a cell). This
# feeds the JITs' array-speculation for intrinsic calls.
1742 macro arrayProfileForCall()
1743 loadisFromInstruction(4, t3)
1745 loadq ThisArgumentOffset[cfr, t3, 8], t0
1746 btqnz t0, tagMask, .done
# The ArrayProfile pointer lives two slots before the end of the call op.
1747 loadpFromInstruction((CallOpCodeSize - 2), t1)
1748 loadi JSCell::m_structureID[t0], t3
1749 storei t3, ArrayProfile::m_lastSeenStructureID[t1]
# Shared body for the call ops: if the callee matches the one cached in the
# LLIntCallLinkInfo (operand 5), build the callee frame inline and jump to
# the cached machine-code target; otherwise fall back to the generic
# slow path, which links the call.
1753 macro doCall(slowPath)
1754 loadisFromInstruction(2, t0)
1755 loadpFromInstruction(5, t1)
1756 loadp LLIntCallLinkInfo::callee[t1], t2
1757 loadConstantOrVariable(t0, t3)
1758 bqneq t3, t2, .opCallSlow
# t3 := register offset of the new (callee) frame, from operand 4. The
# conversion to a frame pointer falls in lines not visible here.
1759 loadisFromInstruction(4, t3)
1763 storeq t2, Callee[t3]
1764 loadisFromInstruction(3, t2)
# Stash the bytecode PC so dispatchAfterCall can restore it on return.
1765 storei PC, ArgumentCount + TagOffset[cfr]
1766 storei t2, ArgumentCount + PayloadOffset[t3]
1767 addp CallerFrameAndPCSize, t3
1768 callTargetFunction(t1, t3)
1771 slowPathForCall(slowPath)
# Body of op_ret (its label is outside this excerpt): load the return value
# into t0; the frame teardown and return sequence fall in lines not visible
# in this excerpt.
1777 checkSwitchToJITForEpilogue()
1778 loadisFromInstruction(1, t2)
1779 loadConstantOrVariable(t2, t0)
# op_to_primitive: immediates and non-object cells (e.g. strings) are
# already primitive and are stored through unchanged; objects require the
# full ToPrimitive protocol on the slow path.
1783 _llint_op_to_primitive:
1785 loadisFromInstruction(2, t2)
1786 loadisFromInstruction(1, t3)
1787 loadConstantOrVariable(t2, t0)
1788 btqnz t0, tagMask, .opToPrimitiveIsImm
# Cells with type >= ObjectType need the slow path.
1789 bbaeq JSCell::m_type[t0], ObjectType, .opToPrimitiveSlowCase
1790 .opToPrimitiveIsImm:
1791 storeq t0, [cfr, t3, 8]
1794 .opToPrimitiveSlowCase:
1795 callSlowPath(_slow_path_to_primitive)
# Body of op_catch (its label is outside this excerpt). Entered from the
# JIT's and interpreter's throw trampolines: re-materializes the tag
# registers, locates the VM via the callee cell's MarkedBlock, restores
# cfr/PB/PC for the handler frame, and moves the pending exception (and the
# JS value it carries) into the catch instruction's operands.
1800 # Gotta restore the tag registers. We could be throwing from FTL, which may
1802 move TagTypeNumber, tagTypeNumber
1803 move TagMask, tagMask
1805 # This is where we end up from the JIT's throw trampoline (because the
1806 # machine code return address will be set to _llint_op_catch), and from
1807 # the interpreter's throw trampoline (see _llint_throw_trampoline).
1808 # The throwing code must have known that we were throwing to the interpreter,
1809 # and have set VM::targetInterpreterPCForThrow.
# Masking the callee cell pointer with MarkedBlockMask yields its block
# header, through which the owning VM is reachable.
1810 loadp Callee[cfr], t3
1811 andp MarkedBlockMask, t3
1812 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
1813 loadp VM::callFrameForThrow[t3], cfr
1814 loadp VM::vmEntryFrameForThrow[t3], t0
1815 storep t0, VM::topVMEntryFrame[t3]
1816 restoreStackPointerAfterCall()
1818 loadp CodeBlock[cfr], PB
1819 loadp CodeBlock::m_instructions[PB], PB
1820 loadp VM::targetInterpreterPCForThrow[t3], PC
# Take the Exception out of the VM and clear the slot so it is not seen as
# still pending.
1824 loadq VM::m_exception[t3], t0
1825 storeq 0, VM::m_exception[t3]
1826 loadisFromInstruction(1, t2)
1827 storeq t0, [cfr, t2, 8]
# Operand 2 receives the thrown JS value carried by the Exception object.
1829 loadq Exception::m_value[t0], t3
1830 loadisFromInstruction(2, t2)
1831 storeq t3, [cfr, t2, 8]
# Body of op_end (its label is outside this excerpt): fetch the program's
# final result into t0; the epilogue/return falls in lines not visible in
# this excerpt.
1839 checkSwitchToJITForEpilogue()
1840 loadisFromInstruction(1, t0)
1841 assertNotConstant(t0)
1842 loadq [cfr, t0, 8], t0
# Common landing pad for exceptions raised inside LLInt slow paths: let the
# C++ exception handler compute the throw target, then jump to it.
1846 _llint_throw_from_slow_path_trampoline:
1847 callSlowPath(_llint_slow_path_handle_exception)
1849 # When throwing from the interpreter (i.e. throwing from LLIntSlowPaths), so
1850 # the throw target is not necessarily interpreted code, we come to here.
1851 # This essentially emulates the JIT's throwing protocol.
# Recover the VM from the callee cell (via its MarkedBlock) and jump to the
# machine-code throw target installed by handle_exception above.
1852 loadp Callee[cfr], t1
1853 andp MarkedBlockMask, t1
1854 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
1855 jmp VM::targetMachinePCForThrow[t1]

# Variant used while a call is in flight: first re-push the return address
# so the frame looks sane to the unwinder/stack walker.
1858 _llint_throw_during_call_trampoline:
1859 preserveReturnAddressAfterCall(t2)
1860 jmp _llint_throw_from_slow_path_trampoline
# Trampoline for invoking a host (native C/C++) function: publishes cfr as
# VM::topCallFrame, loads the JSFunction's executable, and calls the native
# entry point found at executableOffsetToFunction within it. The per-
# platform branches differ only in calling convention and return-address
# handling. After the call, a non-null VM::m_exception routes to the throw
# trampoline. Several interleaved lines (labels, moves) are missing from
# this excerpt.
1863 macro nativeCallTrampoline(executableOffsetToFunction)
# A null CodeBlock marks this as a native frame for stack walkers.
1866 storep 0, CodeBlock[cfr]
1867 if X86_64 or X86_64_WIN
# System V AMD64 argument registers.
1869 const arg1 = t4 # t4 = rdi
1870 const arg2 = t5 # t5 = rsi
# Win64 argument registers (the `else` separating the branches falls in a
# line not visible here).
1873 const arg1 = t2 # t2 = rcx
1874 const arg2 = t1 # t1 = rdx
1877 loadp Callee[cfr], t0
1878 andp MarkedBlockMask, t0, t1
1879 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
1880 storep cfr, VM::topCallFrame[t1]
1882 loadp Callee[cfr], arg2
1883 loadp JSFunction::m_executable[arg2], temp
1884 checkStackPointerAlignment(t3, 0xdead0001)
1888 call executableOffsetToFunction[temp]
# Re-derive the VM pointer after the call; the callee may have clobbered
# all caller-saved registers.
1892 loadp Callee[cfr], t3
1893 andp MarkedBlockMask, t3
1894 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
1895 elsif ARM64 or C_LOOP
1896 loadp Callee[cfr], t0
1897 andp MarkedBlockMask, t0, t1
1898 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
1899 storep cfr, VM::topCallFrame[t1]
1900 preserveReturnAddressAfterCall(t3)
1901 storep t3, ReturnPC[cfr]
1903 loadp Callee[cfr], t1
1904 loadp JSFunction::m_executable[t1], t1
# C_LOOP builds use the cloop pseudo-call; real ARM64 calls directly.
1906 cloopCallNative executableOffsetToFunction[t1]
1908 call executableOffsetToFunction[t1]
1910 restoreReturnAddressBeforeReturn(t3)
1911 loadp Callee[cfr], t3
1912 andp MarkedBlockMask, t3
1913 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
# Exception check; the normal-return code between here and .handleException
# is not visible in this excerpt.
1920 btqnz VM::m_exception[t3], .handleException
# .handleException tail: re-publish the frame and rethrow via the LLInt
# throw trampoline.
1924 storep cfr, VM::topCallFrame[t3]
1925 restoreStackPointerAfterCall()
1926 jmp _llint_throw_from_slow_path_trampoline
# Store the current CodeBlock's JSGlobalObject into the virtual register
# named by instruction operand `dst`.
1930 macro getGlobalObject(dst)
1931 loadp CodeBlock[cfr], t0
1932 loadp CodeBlock::m_globalObject[t0], t0
1933 loadisFromInstruction(dst, t1)
1934 storeq t0, [cfr, t1, 8]
# Bail to slowPath if the global object's var-injection watchpoint has been
# invalidated (e.g. by an eval introducing new vars), which voids cached
# scope-resolution results.
1937 macro varInjectionCheck(slowPath)
1938 loadp CodeBlock[cfr], t0
1939 loadp CodeBlock::m_globalObject[t0], t0
1940 loadp JSGlobalObject::m_varInjectionWatchpoint[t0], t0
1941 bbeq WatchpointSet::m_state[t0], IsInvalidated, slowPath
# Walk `depth` (operand 5) links up the scope chain, starting from the
# scope in operand 2, and store the resulting scope into operand 1. The
# loop label and counter decrement fall in lines not visible here.
1944 macro resolveScope()
1945 loadisFromInstruction(5, t2)
1946 loadisFromInstruction(2, t0)
1947 loadp [cfr, t0, 8], t0
# Depth 0: the starting scope is the answer.
1948 btiz t2, .resolveScopeLoopEnd
1951 loadp JSScope::m_next[t0], t0
1953 btinz t2, .resolveScopeLoop
1955 .resolveScopeLoopEnd:
1956 loadisFromInstruction(1, t1)
1957 storeq t0, [cfr, t1, 8]
# op_resolve_scope: dispatch on the cached ResolveType (operand 4). The
# fast-path bodies of each arm (getGlobalObject / resolveScope calls and
# their dispatches) fall in lines not visible in this excerpt; the
# *WithVarInjectionChecks arms additionally validate the var-injection
# watchpoint first. Anything unrecognized ends at the slow path.
1961 _llint_op_resolve_scope:
1963 loadisFromInstruction(4, t0)
1966 bineq t0, GlobalProperty, .rGlobalVar
1971 bineq t0, GlobalVar, .rClosureVar
1976 bineq t0, ClosureVar, .rGlobalPropertyWithVarInjectionChecks
1980 .rGlobalPropertyWithVarInjectionChecks:
1981 bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
1982 varInjectionCheck(.rDynamic)
1986 .rGlobalVarWithVarInjectionChecks:
1987 bineq t0, GlobalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
1988 varInjectionCheck(.rDynamic)
1992 .rClosureVarWithVarInjectionChecks:
1993 bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
1994 varInjectionCheck(.rDynamic)
# .rDynamic tail: give up and resolve in C++.
1999 callSlowPath(_llint_slow_path_resolve_scope)
# Load the cell in virtual register `operand` into t0 and verify that its
# Structure matches the one cached in instruction operand 5; a mismatch
# means the inline cache is stale, so bail to slowPath.
2003 macro loadWithStructureCheck(operand, slowPath)
2004 loadisFromInstruction(operand, t0)
2005 loadq [cfr, t0, 8], t0
2006 loadStructureWithScratch(t0, t2, t1)
2007 loadpFromInstruction(5, t1)
2008 bpneq t2, t1, slowPath
# Body of a property-load helper (its macro header is outside this
# excerpt; t0 holds the base object): read the property at the cached
# offset in operand 6, value-profile it via operand 7, and store the result
# into operand 1.
2012 loadisFromInstruction(6, t1)
2013 loadPropertyAtVariableOffset(t1, t0, t2)
2014 valueProfile(t2, 7, t0)
2015 loadisFromInstruction(1, t0)
2016 storeq t2, [cfr, t0, 8]
# Load a global variable through the slot pointer cached in operand 6,
# value-profile it via operand 7, and store it into operand 1. (The
# dereference of the slot pointer falls in a line not visible in this
# excerpt.)
2019 macro getGlobalVar()
2020 loadpFromInstruction(6, t0)
2022 valueProfile(t0, 7, t1)
2023 loadisFromInstruction(1, t1)
2024 storeq t0, [cfr, t1, 8]
# Read closure variable #operand6 from the JSEnvironmentRecord already in
# t0 (set up by the caller), value-profile it via operand 7, and store it
# into operand 1.
2027 macro getClosureVar()
2028 loadisFromInstruction(6, t1)
2029 loadq JSEnvironmentRecord_variables[t0, t1, 8], t0
2030 valueProfile(t0, 7, t1)
2031 loadisFromInstruction(1, t1)
2032 storeq t0, [cfr, t1, 8]
# op_get_from_scope: ResolveType dispatch (low bits of operand 4) parallel
# to op_resolve_scope. GlobalProperty arms do a structure-checked cached-
# offset load; GlobalVar/ClosureVar arms use the get*Var helpers above (the
# helper invocations and dispatches fall in lines not visible here);
# .gDynamic ends at the slow path.
2035 _llint_op_get_from_scope:
2037 loadisFromInstruction(4, t0)
2038 andi ResolveModeMask, t0
2041 bineq t0, GlobalProperty, .gGlobalVar
2042 loadWithStructureCheck(2, .gDynamic)
2047 bineq t0, GlobalVar, .gClosureVar
2052 bineq t0, ClosureVar, .gGlobalPropertyWithVarInjectionChecks
2057 .gGlobalPropertyWithVarInjectionChecks:
2058 bineq t0, GlobalPropertyWithVarInjectionChecks, .gGlobalVarWithVarInjectionChecks
2059 loadWithStructureCheck(2, .gDynamic)
2063 .gGlobalVarWithVarInjectionChecks:
2064 bineq t0, GlobalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
2065 varInjectionCheck(.gDynamic)
2069 .gClosureVarWithVarInjectionChecks:
2070 bineq t0, ClosureVarWithVarInjectionChecks, .gDynamic
2071 varInjectionCheck(.gDynamic)
2077 callSlowPath(_llint_slow_path_get_from_scope)
# Body of a property-store helper (its macro header is outside this
# excerpt; t0 holds the base object): store operand 3's value at the cached
# property offset in operand 6.
2082 loadisFromInstruction(3, t1)
2083 loadConstantOrVariable(t1, t2)
2084 loadisFromInstruction(6, t1)
2085 storePropertyAtVariableOffset(t1, t0, t2)
# Store operand 3 into a global variable: fire the variable's WatchpointSet
# (operand 5) first so optimized code that assumed the old value is
# invalidated; operand 6 carries the pointer to the variable's slot. (The
# actual store falls in a line not visible in this excerpt.)
2088 macro putGlobalVar()
2089 loadisFromInstruction(3, t0)
2090 loadConstantOrVariable(t0, t1)
2091 loadpFromInstruction(5, t2)
2092 loadpFromInstruction(6, t0)
2093 notifyWrite(t2, .pDynamic)
# Store operand 3's value into closure variable #operand6 of the
# JSEnvironmentRecord already in t0 (set up by the caller).
2097 macro putClosureVar()
2098 loadisFromInstruction(3, t1)
2099 loadConstantOrVariable(t1, t2)
2100 loadisFromInstruction(6, t1)
2101 storeq t2, JSEnvironmentRecord_variables[t0, t1, 8]
# Store operand 3 into closure variable #operand6 of the scope in t0; if
# the variable has an associated WatchpointSet (operand 5 — may be null),
# fire it first so optimized code observing the variable deoptimizes.
2104 macro putLocalClosureVar()
2105 loadisFromInstruction(3, t1)
2106 loadConstantOrVariable(t1, t2)
2107 loadpFromInstruction(5, t3)
2108 btpz t3, .noVariableWatchpointSet
2109 notifyWrite(t3, .pDynamic)
2110 .noVariableWatchpointSet:
2111 loadisFromInstruction(6, t1)
2112 storeq t2, JSEnvironmentRecord_variables[t0, t1, 8]
# op_put_to_scope: ResolveType dispatch for stores, parallel to
# op_get_from_scope. Each arm issues the appropriate write barrier before
# delegating to the put* helpers above (the helper invocations and
# dispatches fall in lines not visible here); *WithVarInjectionChecks arms
# validate the watchpoint; .pDynamic ends at the slow path.
2116 _llint_op_put_to_scope:
2118 loadisFromInstruction(4, t0)
2119 andi ResolveModeMask, t0
2122 bineq t0, LocalClosureVar, .pGlobalProperty
2123 writeBarrierOnOperands(1, 3)
2125 putLocalClosureVar()
2129 bineq t0, GlobalProperty, .pGlobalVar
2130 writeBarrierOnOperands(1, 3)
2131 loadWithStructureCheck(1, .pDynamic)
2136 bineq t0, GlobalVar, .pClosureVar
# Global-var stores barrier the global object rather than an operand cell.
2137 writeBarrierOnGlobalObject(3)
2142 bineq t0, ClosureVar, .pGlobalPropertyWithVarInjectionChecks
2143 writeBarrierOnOperands(1, 3)
2148 .pGlobalPropertyWithVarInjectionChecks:
2149 bineq t0, GlobalPropertyWithVarInjectionChecks, .pGlobalVarWithVarInjectionChecks
2150 writeBarrierOnOperands(1, 3)
2151 loadWithStructureCheck(1, .pDynamic)
2155 .pGlobalVarWithVarInjectionChecks:
2156 bineq t0, GlobalVarWithVarInjectionChecks, .pClosureVarWithVarInjectionChecks
2157 writeBarrierOnGlobalObject(3)
2158 varInjectionCheck(.pDynamic)
2162 .pClosureVarWithVarInjectionChecks:
2163 bineq t0, ClosureVarWithVarInjectionChecks, .pDynamic
2164 writeBarrierOnOperands(1, 3)
2165 varInjectionCheck(.pDynamic)
2171 callSlowPath(_llint_slow_path_put_to_scope)
# op_get_from_arguments: read element #operand3 out of a DirectArguments
# object (the load of the base into t0 falls in a line not visible in this
# excerpt), value-profile it via operand 4, and store it into operand 1.
2175 _llint_op_get_from_arguments:
# Operand 3 read straight from the instruction stream at byte offset 24.
2178 loadi 24[PB, PC, 8], t1
2179 loadq DirectArguments_storage[t0, t1, 8], t0
2180 valueProfile(t0, 4, t1)
2181 loadisFromInstruction(1, t1)
2182 storeq t0, [cfr, t1, 8]
# op_put_to_arguments: store operand 3's value into element #operand2 of a
# DirectArguments object (the load of the base into t0 falls in a line not
# visible in this excerpt), with a write barrier first.
2186 _llint_op_put_to_arguments:
2188 writeBarrierOnOperands(1, 3)
# Operand 2 read straight from the instruction stream at byte offset 16.
2190 loadi 16[PB, PC, 8], t1
2191 loadisFromInstruction(3, t3)
2192 loadConstantOrVariable(t3, t2)
2193 storeq t2, DirectArguments_storage[t0, t1, 8]
2197 _llint_op_profile_type:
2199 loadp CodeBlock[cfr], t1
2200 loadp CodeBlock::m_vm[t1], t1
2201 # t1 is holding the pointer to the typeProfilerLog.
2202 loadp VM::m_typeProfilerLog[t1], t1
2203 # t2 is holding the pointer to the current log entry.
2204 loadp TypeProfilerLog::m_currentLogEntryPtr[t1], t2
2206 # t0 is holding the JSValue argument.
2207 loadisFromInstruction(1, t3)
2208 loadConstantOrVariable(t3, t0)
2210 # Store the JSValue onto the log entry.
2211 storeq t0, TypeProfilerLog::LogEntry::value[t2]
2213 # Store the TypeLocation onto the log entry.
2214 loadpFromInstruction(2, t3)
2215 storep t3, TypeProfilerLog::LogEntry::location[t2]
2217 btqz t0, tagMask, .opProfileTypeIsCell
2218 storei 0, TypeProfilerLog::LogEntry::structureID[t2]
2219 jmp .opProfileTypeSkipIsCell
2220 .opProfileTypeIsCell:
2221 loadi JSCell::m_structureID[t0], t3
2222 storei t3, TypeProfilerLog::LogEntry::structureID[t2]
2223 .opProfileTypeSkipIsCell:
2225 # Increment the current log entry.
2226 addp sizeof TypeProfilerLog::LogEntry, t2
2227 storep t2, TypeProfilerLog::m_currentLogEntryPtr[t1]
2229 loadp TypeProfilerLog::m_logEndPtr[t1], t1
2230 bpneq t2, t1, .opProfileTypeDone
2231 callSlowPath(_slow_path_profile_type_clear_log)