1 # Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are met:
6 # 1. Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer.
8 # 2. Redistributions in binary form must reproduce the above copyright
9 # notice, this list of conditions and the following disclaimer in the
10 # documentation and/or other materials provided with the distribution.
12 # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14 # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16 # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22 # THE POSSIBILITY OF SUCH DAMAGE.
25 # Crash course on the language that this is written in (which I just call
26 # "assembly" even though it's more than that):
28 # - Mostly gas-style operand ordering. The last operand tends to be the
29 # destination. So "a := b" is written as "mov b, a". But unlike gas,
30 # comparisons are in-order, so "if (a < b)" is written as
33 # - "b" = byte, "h" = 16-bit word, "i" = 32-bit word, "p" = pointer.
34 # Currently this is just 32-bit so "i" and "p" are interchangeable
35 # except when an op supports one but not the other.
37 # - In general, valid operands for macro invocations and instructions are
38 # registers (eg "t0"), addresses (eg "4[t0]"), base-index addresses
39 # (eg "7[t0, t1, 2]"), absolute addresses (eg "0xa0000000[]"), or labels
40 # (eg "_foo" or ".foo"). Macro invocations can also take anonymous
41 # macros as operands. Instructions cannot take anonymous macros.
43 # - Labels must have names that begin with either "_" or ".". A "." label
44 # is local and gets renamed before code gen to minimize namespace
45 # pollution. A "_" label is an extern symbol (i.e. ".globl"). The "_"
46 # may or may not be removed during code gen depending on whether the asm
47 # conventions for C name mangling on the target platform mandate a "_"
50 # - A "macro" is a lambda expression, which may be either anonymous or
51 # named. But this has caveats. "macro" can take zero or more arguments,
52 # which may be macros or any valid operands, but it can only return
53 # code. But you can do Turing-complete things via continuation passing
54 # style: "macro foo (a, b) b(a) end foo(foo, foo)". Actually, don't do
55 # that, since you'll just crash the assembler.
57 # - An "if" is a conditional on settings. Any identifier supplied in the
58 # predicate of an "if" is assumed to be a #define that is available
59 # during code gen. So you can't use "if" for computation in a macro, but
60 # you can use it to select different pieces of code for different
63 # - Arguments to macros follow lexical scoping rather than dynamic scoping.
64 # Const's also follow lexical scoping and may override (hide) arguments
65 # or other consts. All variables (arguments and constants) can be bound
66 # to operands. Additionally, arguments (but not constants) can be bound
70 # Below we have a bunch of constant declarations. Each constant must have
71 # a corresponding ASSERT() in LLIntData.cpp.
# NOTE(review): interior lines of these macros are elided in this excerpt
# (the embedded original numbering is non-contiguous); only the visible
# statements are documented as fact below.
# dispatch: presumably advances PC by "advance" instruction words and jumps
# to the next opcode's implementation — body elided, confirm in full source.
74 macro dispatch(advance)
# dispatchBranchWithOffset: presumably dispatches to PC + pcOffset — body
# elided, confirm in full source.
79 macro dispatchBranchWithOffset(pcOffset)
# dispatchBranch: forwards a branch offset (in t0; the load of t0 from the
# instruction stream is elided here) to dispatchBranchWithOffset.
85 macro dispatchBranch(pcOffset)
87 dispatchBranchWithOffset(t0)
# dispatchAfterCall: runs after a JS call returns. Reloads the bytecode PC
# from the ArgumentCount tag slot of the call frame, stores the call result
# (tag in t1, payload in t0) into the virtual register indexed by t2 (the
# load that sets t2 is elided here — TODO confirm), value-profiles the
# result, then dispatches past the call opcode.
90 macro dispatchAfterCall()
91 loadi ArgumentCount + TagOffset[cfr], PC
93 storei t1, TagOffset[cfr, t2, 8]
94 storei t0, PayloadOffset[cfr, t2, 8]
95 valueProfile(t1, t0, 4 * (CallOpCodeSize - 1), t3)
96 dispatch(CallOpCodeSize)
99 macro cCall2(function, arg1, arg2)
100 if ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS
114 cloopCallSlowPath function, arg1, arg2
120 macro cCall2Void(function, arg1, arg2)
122 cloopCallSlowPathVoid function, arg1, arg2
124 cCall2(function, arg1, arg2)
128 # This barely works. arg3 and arg4 should probably be immediates.
129 macro cCall4(function, arg1, arg2, arg3, arg4)
130 if ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS
144 setargs arg1, arg2, arg3, arg4
# Calls a C slow-path function using the canonical two-argument protocol:
# first argument is the call frame (cfr), second is the current bytecode PC.
153 macro callSlowPath(slowPath)
154 cCall2(slowPath, cfr, PC)
158 macro doVMEntry(makeCall)
162 const protoCallFrame = t5
167 const temp4 = t3 # same as vm
168 elsif ARM or ARMv7 or ARMv7_TRADITIONAL or C_LOOP
171 const protoCallFrame = a2
176 const temp4 = t4 # Same as temp2
180 const protoCallFrame = a2
189 const protoCallFrame = a2
206 vmEntryRecord(cfr, temp1)
209 vmEntryRecord(cfr, sp)
212 storep vm, VMEntryRecord::m_vm[sp]
213 loadp VM::topCallFrame[vm], temp2
214 storep temp2, VMEntryRecord::m_prevTopCallFrame[sp]
215 loadp VM::topVMEntryFrame[vm], temp2
216 storep temp2, VMEntryRecord::m_prevTopVMEntryFrame[sp]
218 # Align stack pointer
220 addp CallFrameAlignSlots * SlotSize, sp, temp1
221 andp ~StackAlignmentMask, temp1
222 subp temp1, CallFrameAlignSlots * SlotSize, sp
223 elsif ARM or ARMv7 or ARMv7_TRADITIONAL
224 addp CallFrameAlignSlots * SlotSize, sp, temp1
225 clrbp temp1, StackAlignmentMask, temp1
227 subp temp1, CallFrameAlignSlots * SlotSize, temp1
230 subp temp1, CallFrameAlignSlots * SlotSize, sp
235 loadp 16[cfr], protoCallFrame
238 loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp2
239 addp CallFrameHeaderSlots, temp2, temp2
241 subp sp, temp2, temp1
243 # Ensure that we have enough additional stack capacity for the incoming args,
244 # and the frame for the JS code we're executing. We need to do this check
245 # before we start copying the args from the protoCallFrame below.
246 bpaeq temp1, VM::m_jsStackLimit[vm], .stackHeightOK
251 cloopCallSlowPath _llint_stack_check_at_vm_entry, vm, temp1
252 bpeq t0, 0, .stackCheckFailed
262 subp 8, sp # Align stack for cCall2() to make a call.
263 cCall2(_llint_throw_stack_overflow_error, vm, protoCallFrame)
266 vmEntryRecord(cfr, temp1)
269 vmEntryRecord(cfr, sp)
272 loadp VMEntryRecord::m_vm[sp], temp3
273 loadp VMEntryRecord::m_prevTopCallFrame[sp], temp4
274 storep temp4, VM::topCallFrame[temp3]
275 loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], temp4
276 storep temp4, VM::topVMEntryFrame[temp3]
279 subp cfr, CalleeRegisterSaveSize, temp3
282 subp cfr, CalleeRegisterSaveSize, sp
295 loadi TagOffset[protoCallFrame, temp1, 8], temp3
296 storei temp3, TagOffset + CodeBlock[sp, temp1, 8]
297 loadi PayloadOffset[protoCallFrame, temp1, 8], temp3
298 storei temp3, PayloadOffset + CodeBlock[sp, temp1, 8]
299 btinz temp1, .copyHeaderLoop
301 loadi PayloadOffset + ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], temp2
303 loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp3
306 bieq temp2, temp3, .copyArgs
309 storei UndefinedTag, ThisArgumentOffset + 8 + TagOffset[sp, temp3, 8]
310 storei 0, ThisArgumentOffset + 8 + PayloadOffset[sp, temp3, 8]
311 bineq temp2, temp3, .fillExtraArgsLoop
314 loadp ProtoCallFrame::args[protoCallFrame], temp1
317 btiz temp2, .copyArgsDone
319 loadi TagOffset[temp1, temp2, 8], temp3
320 storei temp3, ThisArgumentOffset + 8 + TagOffset[sp, temp2, 8]
321 loadi PayloadOffset[temp1, temp2, 8], temp3
322 storei temp3, ThisArgumentOffset + 8 + PayloadOffset[sp, temp2, 8]
326 storep sp, VM::topCallFrame[vm]
327 storep cfr, VM::topVMEntryFrame[vm]
329 makeCall(entry, temp1, temp2)
332 vmEntryRecord(cfr, temp1)
335 vmEntryRecord(cfr, sp)
338 loadp VMEntryRecord::m_vm[sp], temp3
339 loadp VMEntryRecord::m_prevTopCallFrame[sp], temp4
340 storep temp4, VM::topCallFrame[temp3]
341 loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], temp4
342 storep temp4, VM::topVMEntryFrame[temp3]
345 subp cfr, CalleeRegisterSaveSize, temp3
348 subp cfr, CalleeRegisterSaveSize, sp
356 macro makeJavaScriptCall(entry, temp, unused)
357 addp CallerFrameAndPCSize, sp
358 checkStackPointerAlignment(t2, 0xbad0dc02)
360 cloopCallJSFunction entry
364 checkStackPointerAlignment(t2, 0xbad0dc03)
365 subp CallerFrameAndPCSize, sp
368 macro makeHostFunctionCall(entry, temp1, temp2)
373 storep lr, PtrSize[sp]
374 cloopCallNative temp1
376 # Put callee frame pointer on stack as arg0, also put it in ecx for "fastcall" targets
378 move temp2, 4[sp] # put 0 in ReturnPC
379 move sp, t2 # t2 is ecx
380 push temp2 # Push dummy arg1
390 _handleUncaughtException:
391 loadp Callee + PayloadOffset[cfr], t3
392 andp MarkedBlockMask, t3
393 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
394 loadp VM::callFrameForThrow[t3], cfr
396 loadp CallerFrame[cfr], cfr
399 vmEntryRecord(cfr, t3)
402 vmEntryRecord(cfr, sp)
405 loadp VMEntryRecord::m_vm[sp], t3
406 loadp VMEntryRecord::m_prevTopCallFrame[sp], t5
407 storep t5, VM::topCallFrame[t3]
408 loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], t5
409 storep t5, VM::topVMEntryFrame[t3]
412 subp cfr, CalleeRegisterSaveSize, t3
415 subp cfr, CalleeRegisterSaveSize, sp
# Return path for host (native) function calls: runs the function epilogue,
# which releases extraStackSpace. The trailing return instruction is elided
# from this excerpt.
422 macro doReturnFromHostFunction(extraStackSpace)
423 functionEpilogue(extraStackSpace)
427 # Debugging operation if you'd like to print an operand in the instruction stream. fromWhere
428 # should be an immediate integer - any integer you like; use it to identify the place you're
429 # debugging from. operand should likewise be an immediate, and should identify the operand
430 # in the instruction stream you'd like to print out.
# Debug helper: calls the C tracer with (cfr, PC, fromWhere, operand) to
# print an operand from the instruction stream. Both fromWhere and operand
# are expected to be immediates.
431 macro traceOperand(fromWhere, operand)
432 cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand)
437 # Debugging operation if you'd like to print the value of an operand in the instruction
438 # stream. Same as traceOperand(), but assumes that the operand is a register, and prints its
# Debug helper: like traceOperand, but the C side treats the operand as a
# register and prints its value.
440 macro traceValue(fromWhere, operand)
441 cCall4(_llint_trace_value, cfr, PC, fromWhere, operand)
446 # Call a slowPath for call opcodes.
# Slow path for call opcodes. Spills PC into the ArgumentCount tag slot so
# the C code can see it, then makes the standard two-argument slow-path
# call. The use of the "action" argument is elided from this excerpt —
# TODO confirm against the full source.
447 macro callCallSlowPath(slowPath, action)
448 storep PC, ArgumentCount + TagOffset[cfr]
449 cCall2(slowPath, cfr, PC)
# Invokes the watchdog-timer slow path. PC is preserved across the C call
# in the ArgumentCount tag slot. If the handler returns non-zero (t0) we
# branch to throwHandler; otherwise PC is reloaded and execution continues
# (any trailing dispatch is elided from this excerpt).
453 macro callWatchdogTimerHandler(throwHandler)
454 storei PC, ArgumentCount + TagOffset[cfr]
455 cCall2(_llint_slow_path_handle_watchdog_timer, cfr, PC)
456 btpnz t0, throwHandler
457 loadi ArgumentCount + TagOffset[cfr], PC
# Loop back-edge OSR check: saves PC, calls the C loop-OSR slow path, and on
# the visible fall-through path reloads PC and stays in the interpreter.
# The branch taken when OSR produces JIT code (and the surrounding
# threshold check) is elided from this excerpt.
460 macro checkSwitchToJITForLoop()
464 storei PC, ArgumentCount + TagOffset[cfr]
465 cCall2(_llint_loop_osr, cfr, PC)
470 loadi ArgumentCount + TagOffset[cfr], PC
# Loads a virtual register: reads the instruction operand at index "operand"
# into "index", then loads that register's tag and payload words from the
# call frame (8 bytes per register: tag + payload).
474 macro loadVariable(operand, index, tag, payload)
475 loadisFromInstruction(operand, index)
476 loadi TagOffset[cfr, index, 8], tag
477 loadi PayloadOffset[cfr, index, 8], payload
480 # Index, tag, and payload must be different registers. Index is not
# mutated. Branches on whether index refers to a constant-pool entry
# (>= FirstConstantRegisterIndex): the fast path reads the virtual register
# from the call frame; the .constant path (label line elided here) reads
# from the CodeBlock's constant-register buffer, reusing "payload" as the
# buffer pointer. The jump joining the two paths is elided in this excerpt.
482 macro loadConstantOrVariable(index, tag, payload)
483 bigteq index, FirstConstantRegisterIndex, .constant
484 loadi TagOffset[cfr, index, 8], tag
485 loadi PayloadOffset[cfr, index, 8], payload
488 loadp CodeBlock[cfr], payload
489 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
490 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
491 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
492 loadp TagOffset[payload, index, 8], tag
493 loadp PayloadOffset[payload, index, 8], payload
# Like loadConstantOrVariable but loads only the tag word. On the constant
# path (label line elided here) "tag" doubles as scratch for the CodeBlock
# and constant-buffer pointers before receiving the final tag.
497 macro loadConstantOrVariableTag(index, tag)
498 bigteq index, FirstConstantRegisterIndex, .constant
499 loadi TagOffset[cfr, index, 8], tag
502 loadp CodeBlock[cfr], tag
503 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
504 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
505 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
506 loadp TagOffset[tag, index, 8], tag
510 # Index and payload may be the same register. Index may be clobbered.
# Two-register variant of loadConstantOrVariable: on the constant path
# (label elided) "tag" is reused as scratch, and an elided address
# computation (original lines 521-522, folding index into "tag" — TODO
# confirm) precedes the final unscaled loads below.
511 macro loadConstantOrVariable2Reg(index, tag, payload)
512 bigteq index, FirstConstantRegisterIndex, .constant
513 loadi TagOffset[cfr, index, 8], tag
514 loadi PayloadOffset[cfr, index, 8], payload
517 loadp CodeBlock[cfr], tag
518 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
519 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
520 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
523 loadp PayloadOffset[tag], payload
524 loadp TagOffset[tag], tag
# Loads only the payload of a constant or variable, invoking the caller-
# supplied tagCheck macro on the tag's address on each path, so callers can
# branch on an expected tag without a separate tag load. The .constant label
# line and the path-joining jump are elided in this excerpt.
528 macro loadConstantOrVariablePayloadTagCustom(index, tagCheck, payload)
529 bigteq index, FirstConstantRegisterIndex, .constant
530 tagCheck(TagOffset[cfr, index, 8])
531 loadi PayloadOffset[cfr, index, 8], payload
534 loadp CodeBlock[cfr], payload
535 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
536 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
537 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
538 tagCheck(TagOffset[payload, index, 8])
539 loadp PayloadOffset[payload, index, 8], payload
543 # Index and payload must be different registers. Index is not mutated. Use
544 # this if you know what the tag of the variable should be. Doing the tag
545 # test as part of loading the variable reduces register use, but may not
546 # be faster than doing loadConstantOrVariable followed by a branch on the
# Checked payload load: passes an anonymous tagCheck macro that branches to
# "slow" when the actual tag differs from expectedTag. Remaining arguments
# of the inner call are elided in this excerpt.
548 macro loadConstantOrVariablePayload(index, expectedTag, payload, slow)
549 loadConstantOrVariablePayloadTagCustom(
551 macro (actualTag) bineq actualTag, expectedTag, slow end,
# Unchecked payload load: the tagCheck macro is a no-op, so the tag is never
# inspected. Remaining arguments of the inner call are elided in this
# excerpt.
555 macro loadConstantOrVariablePayloadUnchecked(index, payload)
556 loadConstantOrVariablePayloadTagCustom(
558 macro (actualTag) end,
# Stores a cell's structure pointer, then copies the packed type-info word
# (word2 of the structure's StructureIDBlob) into the cell's indexing-type/
# flags field so the inline type bits stay consistent with the structure.
562 macro storeStructureWithTypeInfo(cell, structure, scratch)
563 storep structure, JSCell::m_structureID[cell]
565 loadi Structure::m_blob + StructureIDBlob::u.words.word2[structure], scratch
566 storei scratch, JSCell::m_indexingType[cell]
569 macro writeBarrierOnOperand(cellOperand)
571 loadisFromInstruction(cellOperand, t1)
572 loadConstantOrVariablePayload(t1, CellTag, t2, .writeBarrierDone)
573 skipIfIsRememberedOrInEden(t2, t1, t3,
575 btbnz gcData, .writeBarrierDone
577 # We make two extra slots because cCall2 will poke.
579 cCall2Void(_llint_write_barrier_slow, cfr, t2)
588 macro writeBarrierOnOperands(cellOperand, valueOperand)
590 loadisFromInstruction(valueOperand, t1)
591 loadConstantOrVariableTag(t1, t0)
592 bineq t0, CellTag, .writeBarrierDone
594 writeBarrierOnOperand(cellOperand)
599 macro writeBarrierOnGlobalObject(valueOperand)
601 loadisFromInstruction(valueOperand, t1)
602 loadConstantOrVariableTag(t1, t0)
603 bineq t0, CellTag, .writeBarrierDone
605 loadp CodeBlock[cfr], t3
606 loadp CodeBlock::m_globalObject[t3], t3
607 skipIfIsRememberedOrInEden(t3, t1, t2,
609 btbnz gcData, .writeBarrierDone
611 # We make two extra slots because cCall2 will poke.
613 cCall2Void(_llint_write_barrier_slow, cfr, t3)
# Records a JSValue (tag + payload) into a ValueProfile bucket. "operand" is
# the byte offset from PC of the instruction slot holding the ValueProfile
# pointer; scratch receives that pointer.
622 macro valueProfile(tag, payload, operand, scratch)
623 loadp operand[PC], scratch
624 storei tag, ValueProfile::m_buckets + TagOffset[scratch]
625 storei payload, ValueProfile::m_buckets + PayloadOffset[scratch]
629 # Entrypoints into the interpreter
631 # Expects that CodeBlock is in t1, which is what prologue() leaves behind.
632 macro functionArityCheck(doneLabel, slowPath)
633 loadi PayloadOffset + ArgumentCount[cfr], t0
634 biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
635 cCall2(slowPath, cfr, PC) # This slowPath has a simple protocol: t0 = 0 => no error, t0 != 0 => error
637 move t1, cfr # t1 contains caller frame
638 jmp _llint_throw_from_slow_path_trampoline
641 # t1 points to ArityCheckData.
642 loadp CommonSlowPaths::ArityCheckData::thunkToCall[t1], t2
643 btpz t2, .proceedInline
645 loadp CommonSlowPaths::ArityCheckData::returnPC[t1], t5
646 loadp CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t0
649 loadp ReturnPC[cfr], t0
655 loadi CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t1
658 // Move frame up "t1 * 2" slots
662 loadi PayloadOffset + ArgumentCount[cfr], t2
663 addi CallFrameHeaderSlots, t2
665 loadi PayloadOffset[t3], t0
666 storei t0, PayloadOffset[t3, t1, 8]
667 loadi TagOffset[t3], t0
668 storei t0, TagOffset[t3, t1, 8]
670 bsubinz 1, t2, .copyLoop
672 // Fill new slots with JSUndefined
676 storei t0, PayloadOffset[t3, t1, 8]
677 move UndefinedTag, t0
678 storei t0, TagOffset[t3, t1, 8]
680 baddinz 1, t2, .fillLoop
686 # Reload CodeBlock and PC, since the slow_path clobbered it.
687 loadp CodeBlock[cfr], t1
688 loadp CodeBlock::m_instructions[t1], PC
692 macro branchIfException(label)
693 loadp Callee + PayloadOffset[cfr], t3
694 andp MarkedBlockMask, t3
695 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
696 btiz VM::m_exception[t3], .noException
702 # Instruction implementations
706 checkStackPointerAlignment(t2, 0xdead00e1)
707 loadp CodeBlock[cfr], t2 // t2<CodeBlock> = cfr.CodeBlock
708 loadi CodeBlock::m_numVars[t2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars
709 btiz t2, .opEnterDone
710 move UndefinedTag, t0
714 storei t0, TagOffset[cfr, t2, 8]
715 storei t1, PayloadOffset[cfr, t2, 8]
717 btinz t2, .opEnterLoop
719 callSlowPath(_slow_path_enter)
723 _llint_op_create_lexical_environment:
725 callSlowPath(_llint_slow_path_create_lexical_environment)
731 loadi Callee + PayloadOffset[cfr], t0
732 loadi JSCallee::m_scope[t0], t0
733 loadisFromInstruction(1, t1)
734 storei CellTag, TagOffset[cfr, t1, 8]
735 storei t0, PayloadOffset[cfr, t1, 8]
739 _llint_op_create_this:
742 loadp PayloadOffset[cfr, t0, 8], t0
743 loadp JSFunction::m_rareData[t0], t4
744 btpz t4, .opCreateThisSlow
745 loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_allocator[t4], t1
746 loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_structure[t4], t2
747 btpz t1, .opCreateThisSlow
748 loadpFromInstruction(4, t4)
749 bpeq t4, 1, .hasSeenMultipleCallee
750 bpneq t4, t0, .opCreateThisSlow
751 .hasSeenMultipleCallee:
752 allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow)
754 storei CellTag, TagOffset[cfr, t1, 8]
755 storei t0, PayloadOffset[cfr, t1, 8]
759 callSlowPath(_slow_path_create_this)
766 bineq TagOffset[cfr, t0, 8], CellTag, .opToThisSlow
767 loadi PayloadOffset[cfr, t0, 8], t0
768 bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
769 loadpFromInstruction(2, t2)
770 bpneq JSCell::m_structureID[t0], t2, .opToThisSlow
774 callSlowPath(_slow_path_to_this)
778 _llint_op_new_object:
780 loadpFromInstruction(3, t0)
781 loadp ObjectAllocationProfile::m_allocator[t0], t1
782 loadp ObjectAllocationProfile::m_structure[t0], t2
783 allocateJSObject(t1, t2, t0, t3, .opNewObjectSlow)
785 storei CellTag, TagOffset[cfr, t1, 8]
786 storei t0, PayloadOffset[cfr, t1, 8]
790 callSlowPath(_llint_slow_path_new_object)
796 loadpFromInstruction(1, t0)
797 bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opNotTDZ
798 callSlowPath(_slow_path_throw_tdz_error)
808 loadConstantOrVariable(t1, t2, t3)
809 storei t2, TagOffset[cfr, t0, 8]
810 storei t3, PayloadOffset[cfr, t0, 8]
818 loadConstantOrVariable(t0, t2, t3)
819 bineq t2, BooleanTag, .opNotSlow
821 storei t2, TagOffset[cfr, t1, 8]
822 storei t3, PayloadOffset[cfr, t1, 8]
826 callSlowPath(_slow_path_not)
834 loadConstantOrVariable(t2, t3, t1)
835 loadConstantOrVariable2Reg(t0, t2, t0)
836 bineq t2, t3, .opEqSlow
837 bieq t2, CellTag, .opEqSlow
838 bib t2, LowestTag, .opEqSlow
841 storei BooleanTag, TagOffset[cfr, t2, 8]
842 storei t0, PayloadOffset[cfr, t2, 8]
846 callSlowPath(_slow_path_eq)
854 assertNotConstant(t0)
855 loadi TagOffset[cfr, t0, 8], t1
856 loadi PayloadOffset[cfr, t0, 8], t0
857 bineq t1, CellTag, .opEqNullImmediate
858 btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .opEqNullMasqueradesAsUndefined
860 jmp .opEqNullNotImmediate
861 .opEqNullMasqueradesAsUndefined:
862 loadp JSCell::m_structureID[t0], t1
863 loadp CodeBlock[cfr], t0
864 loadp CodeBlock::m_globalObject[t0], t0
865 cpeq Structure::m_globalObject[t1], t0, t1
866 jmp .opEqNullNotImmediate
869 cieq t1, UndefinedTag, t1
871 .opEqNullNotImmediate:
872 storei BooleanTag, TagOffset[cfr, t3, 8]
873 storei t1, PayloadOffset[cfr, t3, 8]
881 loadConstantOrVariable(t2, t3, t1)
882 loadConstantOrVariable2Reg(t0, t2, t0)
883 bineq t2, t3, .opNeqSlow
884 bieq t2, CellTag, .opNeqSlow
885 bib t2, LowestTag, .opNeqSlow
888 storei BooleanTag, TagOffset[cfr, t2, 8]
889 storei t0, PayloadOffset[cfr, t2, 8]
893 callSlowPath(_slow_path_neq)
901 assertNotConstant(t0)
902 loadi TagOffset[cfr, t0, 8], t1
903 loadi PayloadOffset[cfr, t0, 8], t0
904 bineq t1, CellTag, .opNeqNullImmediate
905 btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .opNeqNullMasqueradesAsUndefined
907 jmp .opNeqNullNotImmediate
908 .opNeqNullMasqueradesAsUndefined:
909 loadp JSCell::m_structureID[t0], t1
910 loadp CodeBlock[cfr], t0
911 loadp CodeBlock::m_globalObject[t0], t0
912 cpneq Structure::m_globalObject[t1], t0, t1
913 jmp .opNeqNullNotImmediate
915 cineq t1, NullTag, t2
916 cineq t1, UndefinedTag, t1
918 .opNeqNullNotImmediate:
919 storei BooleanTag, TagOffset[cfr, t3, 8]
920 storei t1, PayloadOffset[cfr, t3, 8]
924 macro strictEq(equalityOperation, slowPath)
927 loadConstantOrVariable(t2, t3, t1)
928 loadConstantOrVariable2Reg(t0, t2, t0)
930 bib t2, LowestTag, .slow
931 bineq t2, CellTag, .notStringOrSymbol
932 bbaeq JSCell::m_type[t0], ObjectType, .notStringOrSymbol
933 bbb JSCell::m_type[t1], ObjectType, .slow
936 equalityOperation(t0, t1, t0)
937 storei BooleanTag, TagOffset[cfr, t2, 8]
938 storei t0, PayloadOffset[cfr, t2, 8]
942 callSlowPath(slowPath)
948 strictEq(macro (left, right, result) cieq left, right, result end, _slow_path_stricteq)
953 strictEq(macro (left, right, result) cineq left, right, result end, _slow_path_nstricteq)
959 bineq TagOffset[cfr, t0, 8], Int32Tag, .opIncSlow
960 loadi PayloadOffset[cfr, t0, 8], t1
961 baddio 1, t1, .opIncSlow
962 storei t1, PayloadOffset[cfr, t0, 8]
966 callSlowPath(_slow_path_inc)
973 bineq TagOffset[cfr, t0, 8], Int32Tag, .opDecSlow
974 loadi PayloadOffset[cfr, t0, 8], t1
975 bsubio 1, t1, .opDecSlow
976 storei t1, PayloadOffset[cfr, t0, 8]
980 callSlowPath(_slow_path_dec)
988 loadConstantOrVariable(t0, t2, t3)
989 bieq t2, Int32Tag, .opToNumberIsInt
990 biaeq t2, LowestTag, .opToNumberSlow
992 storei t2, TagOffset[cfr, t1, 8]
993 storei t3, PayloadOffset[cfr, t1, 8]
997 callSlowPath(_slow_path_to_number)
1001 _llint_op_to_string:
1005 loadConstantOrVariable(t0, t2, t3)
1006 bineq t2, CellTag, .opToStringSlow
1007 bbneq JSCell::m_type[t3], StringType, .opToStringSlow
1008 .opToStringIsString:
1009 storei t2, TagOffset[cfr, t1, 8]
1010 storei t3, PayloadOffset[cfr, t1, 8]
1014 callSlowPath(_slow_path_to_string)
1022 loadConstantOrVariable(t0, t1, t2)
1023 bineq t1, Int32Tag, .opNegateSrcNotInt
1024 btiz t2, 0x7fffffff, .opNegateSlow
1026 storei Int32Tag, TagOffset[cfr, t3, 8]
1027 storei t2, PayloadOffset[cfr, t3, 8]
1030 bia t1, LowestTag, .opNegateSlow
1032 storei t1, TagOffset[cfr, t3, 8]
1033 storei t2, PayloadOffset[cfr, t3, 8]
1037 callSlowPath(_slow_path_negate)
1041 macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
1044 loadConstantOrVariable(t2, t3, t1)
1045 loadConstantOrVariable2Reg(t0, t2, t0)
1046 bineq t2, Int32Tag, .op1NotInt
1047 bineq t3, Int32Tag, .op2NotInt
1049 integerOperationAndStore(t3, t1, t0, .slow, t2)
1053 # First operand is definitely not an int, the second operand could be anything.
1054 bia t2, LowestTag, .slow
1055 bib t3, LowestTag, .op1NotIntOp2Double
1056 bineq t3, Int32Tag, .slow
1059 .op1NotIntOp2Double:
1064 doubleOperation(ft1, ft0)
1065 stored ft0, [cfr, t1, 8]
1069 # First operand is definitely an int, the second operand is definitely not.
1071 bia t3, LowestTag, .slow
1074 doubleOperation(ft1, ft0)
1075 stored ft0, [cfr, t2, 8]
1079 callSlowPath(slowPath)
1083 macro binaryOp(integerOperation, doubleOperation, slowPath)
1084 binaryOpCustomStore(
1085 macro (int32Tag, left, right, slow, index)
1086 integerOperation(left, right, slow)
1087 storei int32Tag, TagOffset[cfr, index, 8]
1088 storei right, PayloadOffset[cfr, index, 8]
1090 doubleOperation, slowPath)
1096 macro (left, right, slow) baddio left, right, slow end,
1097 macro (left, right) addd left, right end,
1103 binaryOpCustomStore(
1104 macro (int32Tag, left, right, slow, index)
1105 const scratch = int32Tag # We know that we can reuse the int32Tag register since it has a constant.
1107 bmulio left, scratch, slow
1108 btinz scratch, .done
1112 storei Int32Tag, TagOffset[cfr, index, 8]
1113 storei scratch, PayloadOffset[cfr, index, 8]
1115 macro (left, right) muld left, right end,
1122 macro (left, right, slow) bsubio left, right, slow end,
1123 macro (left, right) subd left, right end,
1129 binaryOpCustomStore(
1130 macro (int32Tag, left, right, slow, index)
1134 bcd2i ft1, right, .notInt
1135 storei int32Tag, TagOffset[cfr, index, 8]
1136 storei right, PayloadOffset[cfr, index, 8]
1139 stored ft1, [cfr, index, 8]
1142 macro (left, right) divd left, right end,
1146 macro bitOp(operation, slowPath, advance)
1149 loadConstantOrVariable(t2, t3, t1)
1150 loadConstantOrVariable2Reg(t0, t2, t0)
1151 bineq t3, Int32Tag, .slow
1152 bineq t2, Int32Tag, .slow
1155 storei t3, TagOffset[cfr, t2, 8]
1156 storei t0, PayloadOffset[cfr, t2, 8]
1160 callSlowPath(slowPath)
1167 macro (left, right) lshifti left, right end,
1175 macro (left, right) rshifti left, right end,
1183 macro (left, right) urshifti left, right end,
1192 loadConstantOrVariablePayload(t1, Int32Tag, t2, .opUnsignedSlow)
1193 bilt t2, 0, .opUnsignedSlow
1194 storei t2, PayloadOffset[cfr, t0, 8]
1195 storei Int32Tag, TagOffset[cfr, t0, 8]
1198 callSlowPath(_slow_path_unsigned)
1205 macro (left, right) andi left, right end,
1213 macro (left, right) xori left, right end,
1221 macro (left, right) ori left, right end,
1226 _llint_op_check_has_instance:
1229 loadConstantOrVariablePayload(t1, CellTag, t0, .opCheckHasInstanceSlow)
1230 btbz JSCell::m_flags[t0], ImplementsDefaultHasInstance, .opCheckHasInstanceSlow
1233 .opCheckHasInstanceSlow:
1234 callSlowPath(_llint_slow_path_check_has_instance)
1238 _llint_op_instanceof:
1240 # Actually do the work.
1243 loadConstantOrVariablePayload(t0, CellTag, t1, .opInstanceofSlow)
1244 bbb JSCell::m_type[t1], ObjectType, .opInstanceofSlow
1246 loadConstantOrVariablePayload(t0, CellTag, t2, .opInstanceofSlow)
1248 # Register state: t1 = prototype, t2 = value
1251 loadp JSCell::m_structureID[t2], t2
1252 loadi Structure::m_prototype + PayloadOffset[t2], t2
1253 bpeq t2, t1, .opInstanceofDone
1254 btinz t2, .opInstanceofLoop
1258 storei BooleanTag, TagOffset[cfr, t3, 8]
1259 storei t0, PayloadOffset[cfr, t3, 8]
1263 callSlowPath(_llint_slow_path_instanceof)
1267 _llint_op_is_undefined:
1271 loadConstantOrVariable(t1, t2, t3)
1272 storei BooleanTag, TagOffset[cfr, t0, 8]
1273 bieq t2, CellTag, .opIsUndefinedCell
1274 cieq t2, UndefinedTag, t3
1275 storei t3, PayloadOffset[cfr, t0, 8]
1278 btbnz JSCell::m_flags[t3], MasqueradesAsUndefined, .opIsUndefinedMasqueradesAsUndefined
1280 storei t1, PayloadOffset[cfr, t0, 8]
1282 .opIsUndefinedMasqueradesAsUndefined:
1283 loadp JSCell::m_structureID[t3], t1
1284 loadp CodeBlock[cfr], t3
1285 loadp CodeBlock::m_globalObject[t3], t3
1286 cpeq Structure::m_globalObject[t1], t3, t1
1287 storei t1, PayloadOffset[cfr, t0, 8]
1291 _llint_op_is_boolean:
1295 loadConstantOrVariableTag(t1, t0)
1296 cieq t0, BooleanTag, t0
1297 storei BooleanTag, TagOffset[cfr, t2, 8]
1298 storei t0, PayloadOffset[cfr, t2, 8]
1302 _llint_op_is_number:
1306 loadConstantOrVariableTag(t1, t0)
1307 storei BooleanTag, TagOffset[cfr, t2, 8]
1309 cib t0, LowestTag + 1, t1
1310 storei t1, PayloadOffset[cfr, t2, 8]
1314 _llint_op_is_string:
1318 loadConstantOrVariable(t1, t0, t3)
1319 storei BooleanTag, TagOffset[cfr, t2, 8]
1320 bineq t0, CellTag, .opIsStringNotCell
1321 cbeq JSCell::m_type[t3], StringType, t1
1322 storei t1, PayloadOffset[cfr, t2, 8]
1325 storep 0, PayloadOffset[cfr, t2, 8]
1329 _llint_op_is_object:
1333 loadConstantOrVariable(t1, t0, t3)
1334 storei BooleanTag, TagOffset[cfr, t2, 8]
1335 bineq t0, CellTag, .opIsObjectNotCell
1336 cbaeq JSCell::m_type[t3], ObjectType, t1
1337 storei t1, PayloadOffset[cfr, t2, 8]
1340 storep 0, PayloadOffset[cfr, t2, 8]
# Loads a property known to live out-of-line: asserts the offset is in the
# out-of-line range, replaces objectAndStorage (clobbered) with the object's
# butterfly pointer, then indexes it. An interior line (original line 1346,
# likely the offset negation — TODO confirm) is elided from this excerpt.
1344 macro loadPropertyAtVariableOffsetKnownNotInline(propertyOffset, objectAndStorage, tag, payload)
1345 assert(macro (ok) bigteq propertyOffset, firstOutOfLineOffset, ok end)
1347 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1348 loadi TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], tag
1349 loadi PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], payload
# General property load: branches to the inline-storage path when the offset
# is below firstOutOfLineOffset, otherwise loads through the butterfly.
# Several interior lines (the offset negation, the .isInline label, and the
# path join — per the elided original numbering) are missing from this
# excerpt; objectAndStorage is clobbered to the storage base on both paths.
1352 macro loadPropertyAtVariableOffset(propertyOffset, objectAndStorage, tag, payload)
1353 bilt propertyOffset, firstOutOfLineOffset, .isInline
1354 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1358 addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1360 loadi TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], tag
1361 loadi PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], payload
# General property store, mirror of loadPropertyAtVariableOffset: selects
# inline vs out-of-line storage, negates the (mutated) offset for the
# out-of-line indexing scheme, and writes tag + payload. The .isInline label
# and path-join lines are elided from this excerpt; objectAndStorage is
# clobbered to the storage base.
1364 macro storePropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, tag, payload)
1365 bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1366 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1367 negi propertyOffsetAsInt
1370 addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1372 storei tag, TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1373 storei payload, PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1377 _llint_op_init_global_const:
1379 writeBarrierOnGlobalObject(2)
1382 loadConstantOrVariable(t1, t2, t3)
1383 storei t2, TagOffset[t0]
1384 storei t3, PayloadOffset[t0]
1388 # We only do monomorphic get_by_id caching for now, and we do not modify the
1389 # opcode. We do, however, allow for the cache to change anytime if fails, since
1390 # ping-ponging is free. At best we get lucky and the get_by_id will continue
1391 # to take fast path on the new cache. At worst we take slow path, which is what
1392 # we would have been doing anyway.
1394 macro getById(getPropertyStorage)
1398 loadConstantOrVariablePayload(t0, CellTag, t3, .opGetByIdSlow)
1403 macro (propertyStorage, scratch)
1404 bpneq JSCell::m_structureID[t3], t1, .opGetByIdSlow
1406 loadi TagOffset[propertyStorage, t2], scratch
1407 loadi PayloadOffset[propertyStorage, t2], t2
1408 storei scratch, TagOffset[cfr, t1, 8]
1409 storei t2, PayloadOffset[cfr, t1, 8]
1410 valueProfile(scratch, t2, 32, t1)
1415 callSlowPath(_llint_slow_path_get_by_id)
1419 _llint_op_get_by_id:
1420 getById(withInlineStorage)
1423 _llint_op_get_by_id_out_of_line:
1424 getById(withOutOfLineStorage)
1427 _llint_op_get_array_length:
1431 loadConstantOrVariablePayload(t0, CellTag, t3, .opGetArrayLengthSlow)
1433 arrayProfile(t2, t1, t0)
1434 btiz t2, IsArray, .opGetArrayLengthSlow
1435 btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
1437 loadp JSObject::m_butterfly[t3], t0
1438 loadi -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], t0
1439 bilt t0, 0, .opGetArrayLengthSlow
1440 valueProfile(Int32Tag, t0, 32, t2)
1441 storep t0, PayloadOffset[cfr, t1, 8]
1442 storep Int32Tag, TagOffset[cfr, t1, 8]
1445 .opGetArrayLengthSlow:
1446 callSlowPath(_llint_slow_path_get_by_id)
# putById: shared fast-path body for put_by_id (replace-existing-property
# case; transitions are handled by putByIdTransition below).
# NOTE(review): interior lines are elided in this excerpt (original line
# numbers jump, e.g. 1455 -> 1460); register setup is partly invisible.
1450 macro putById(getPropertyStorage)
# Write barrier on the base (operand 1) and the value (operand 3).
1452 writeBarrierOnOperands(1, 3)
1455 loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
1460 macro (propertyStorage, scratch)
# Structure check against the cached structure in t1.
1461 bpneq JSCell::m_structureID[t0], t1, .opPutByIdSlow
# Load the value operand's tag (into scratch) and payload (into t2), then
# store to the cached property slot. t1 presumably holds the cached offset
# here — TODO confirm; its reload is elided in this excerpt.
1463 loadConstantOrVariable2Reg(t2, scratch, t2)
1464 storei scratch, TagOffset[propertyStorage, t1]
1465 storei t2, PayloadOffset[propertyStorage, t1]
1470 callSlowPath(_llint_slow_path_put_by_id)
# put_by_id entry points: inline vs. out-of-line property storage.
1474 _llint_op_put_by_id:
1475 putById(withInlineStorage)
1478 _llint_op_put_by_id_out_of_line:
1479 putById(withOutOfLineStorage)
# putByIdTransition: fast path for put_by_id that adds a property, moving the
# object from the cached old structure (t1) to a cached new structure.
# additionalChecks validates the prototype chain (or is a no-op for direct
# transitions); getPropertyStorage selects inline vs. out-of-line storage.
# NOTE(review): interior instructions are elided in this excerpt — in
# particular the load of the new structure into t1 before it is stored into
# m_structureID below is not visible; verify against the full file.
1482 macro putByIdTransition(additionalChecks, getPropertyStorage)
# Only the base (operand 1) needs a barrier here.
1484 writeBarrierOnOperand(1)
1487 loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
# Old-structure check, then the transition-specific extra checks.
1489 bpneq JSCell::m_structureID[t0], t1, .opPutByIdSlow
1490 additionalChecks(t1, t3, .opPutByIdSlow)
1495 macro (propertyStorage, scratch)
# t3 = address of the new property slot (storage base + offset in t1).
1496 addp t1, propertyStorage, t3
1497 loadConstantOrVariable2Reg(t2, t1, t2)
1498 storei t1, TagOffset[t3]
1500 storei t2, PayloadOffset[t3]
# Commit the structure transition last, after the slot is written.
1501 storep t1, JSCell::m_structureID[t0]
1506 callSlowPath(_llint_slow_path_put_by_id)
# noAdditionalChecks: used for *_direct transitions — no prototype-chain
# validation is needed. (Its empty body/end is elided in this excerpt.)
1510 macro noAdditionalChecks(oldStructure, scratch, slowPath)
# structureChainChecks: walk the cached StructureChain and verify that every
# object on the base's prototype chain still has the structure recorded when
# the cache was built; any mismatch sends us to slowPath.
1513 macro structureChainChecks(oldStructure, scratch, slowPath)
1514 const protoCell = oldStructure # Reusing the oldStructure register for the proto
# Load the cached StructureChain from the instruction stream (PC-relative).
1516 loadp 28[PC], scratch
1517 assert(macro (ok) btpnz scratch, ok end)
1518 loadp StructureChain::m_vector[scratch], scratch
1519 assert(macro (ok) btpnz scratch, ok end)
# A null prototype terminates the walk successfully.
1520 bieq Structure::m_prototype + TagOffset[oldStructure], NullTag, .done
# Loop body: fetch the prototype cell, compare its structure against the
# next entry in the chain vector. NOTE(review): the .loop label and the
# vector-advance instruction are elided in this excerpt.
1522 loadi Structure::m_prototype + PayloadOffset[oldStructure], protoCell
1523 loadp JSCell::m_structureID[protoCell], oldStructure
1524 bpneq oldStructure, [scratch], slowPath
1526 bineq Structure::m_prototype + TagOffset[oldStructure], NullTag, .loop
# Transition entry points: direct transitions skip chain checks; normal
# transitions validate the prototype chain.
1530 _llint_op_put_by_id_transition_direct:
1531 putByIdTransition(noAdditionalChecks, withInlineStorage)
1534 _llint_op_put_by_id_transition_direct_out_of_line:
1535 putByIdTransition(noAdditionalChecks, withOutOfLineStorage)
1538 _llint_op_put_by_id_transition_normal:
1539 putByIdTransition(structureChainChecks, withInlineStorage)
1542 _llint_op_put_by_id_transition_normal_out_of_line:
1543 putByIdTransition(structureChainChecks, withOutOfLineStorage)
# get_by_val: indexed load. Dispatches on the base's indexing shape:
# Int32/Contiguous (tag+payload pairs), Double (raw doubles), then the
# ArrayStorage family; anything else goes to the slow path.
# NOTE(review): some operand loads and dispatch()es are elided in this
# excerpt (embedded line numbers jump).
1546 _llint_op_get_by_val:
1549 loadConstantOrVariablePayload(t2, CellTag, t0, .opGetByValSlow)
1552 arrayProfile(t2, t3, t1)
# Index must be an Int32.
1554 loadConstantOrVariablePayload(t3, Int32Tag, t1, .opGetByValSlow)
1555 loadp JSObject::m_butterfly[t0], t3
1556 andi IndexingShapeMask, t2
1557 bieq t2, Int32Shape, .opGetByValIsContiguous
1558 bineq t2, ContiguousShape, .opGetByValNotContiguous
1559 .opGetByValIsContiguous:
# Bounds check against publicLength (unsigned compare also rejects
# negative indices reinterpreted as large unsigned values).
1561 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1562 loadi TagOffset[t3, t1, 8], t2
1563 loadi PayloadOffset[t3, t1, 8], t1
1566 .opGetByValNotContiguous:
1567 bineq t2, DoubleShape, .opGetByValNotDouble
1568 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1569 loadd [t3, t1, 8], ft0
# NaN means the slot is a hole (pure NaN encoding); take the slow path.
1570 bdnequn ft0, ft0, .opGetByValSlow
1571 # FIXME: This could be massively optimized.
1574 jmp .opGetByValNotEmpty
1576 .opGetByValNotDouble:
# Accept ArrayStorage..SlowPutArrayStorage via a single unsigned range check.
1577 subi ArrayStorageShape, t2
1578 bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValSlow
# ArrayStorage is bounds-checked against vectorLength, and holes are
# detected via the EmptyValueTag below.
1579 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t3], .opGetByValOutOfBounds
1580 loadi ArrayStorage::m_vector + TagOffset[t3, t1, 8], t2
1581 loadi ArrayStorage::m_vector + PayloadOffset[t3, t1, 8], t1
1585 bieq t2, EmptyValueTag, .opGetByValOutOfBounds
1586 .opGetByValNotEmpty:
# Common exit: store tag (t2) / payload (t1) into the destination register.
1587 storei t2, TagOffset[cfr, t0, 8]
1588 storei t1, PayloadOffset[cfr, t0, 8]
1589 valueProfile(t2, t1, 20, t0)
1592 .opGetByValOutOfBounds:
# Record the out-of-bounds access in the ArrayProfile before slow-pathing,
# so the JITs know to compile an OOB-tolerant access.
1593 loadpFromInstruction(4, t0)
1594 storeb 1, ArrayProfile::m_outOfBounds[t0]
1596 callSlowPath(_llint_slow_path_get_by_val)
# contiguousPutByVal: in-bounds store plus the length-extending
# "one past the end" append case for contiguous-shaped butterflies.
# storeCallback(operand, scratch, base, index) performs the actual store.
# NOTE(review): labels/dispatch between the visible lines are elided in this
# excerpt; the .outOfBounds handling shown below extends publicLength after
# recording a may-store-to-hole in the ArrayProfile.
1600 macro contiguousPutByVal(storeCallback)
1601 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
1604 storeCallback(t2, t1, t0, t3)
# Beyond vectorLength we cannot grow in place: take the full slow path.
1608 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1610 storeb 1, ArrayProfile::m_mayStoreToHole[t2]
# Grow publicLength to cover the newly written index (t2 presumably holds
# index+1 here — TODO confirm, the increment is elided in this excerpt).
1612 storei t2, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
# putByVal: indexed store, dispatched on indexing shape (Int32, Double,
# Contiguous, ArrayStorage). Shared between op_put_by_val and
# op_put_by_val_direct via the slowPath argument.
# NOTE(review): interior dispatch()es, labels, and some operand loads are
# elided in this excerpt (embedded line numbers jump).
1616 macro putByVal(slowPath)
1618 writeBarrierOnOperands(1, 3)
1620 loadConstantOrVariablePayload(t0, CellTag, t1, .opPutByValSlow)
1623 arrayProfile(t2, t3, t0)
# Index must be an Int32.
1625 loadConstantOrVariablePayload(t0, Int32Tag, t3, .opPutByValSlow)
1626 loadp JSObject::m_butterfly[t1], t0
1627 andi IndexingShapeMask, t2
1628 bineq t2, Int32Shape, .opPutByValNotInt32
# Int32Shape: only int32 values may be stored without a shape transition.
1630 macro (operand, scratch, base, index)
1631 loadConstantOrVariablePayload(operand, Int32Tag, scratch, .opPutByValSlow)
1632 storei Int32Tag, TagOffset[base, index, 8]
1633 storei scratch, PayloadOffset[base, index, 8]
1636 .opPutByValNotInt32:
1637 bineq t2, DoubleShape, .opPutByValNotDouble
# DoubleShape: ints are converted (conversion elided here); doubles are
# stored raw; NaN payloads would collide with the hole encoding, so pure
# NaN goes to the slow path.
1639 macro (operand, scratch, base, index)
1641 const payload = operand
1642 loadConstantOrVariable2Reg(operand, tag, payload)
1643 bineq tag, Int32Tag, .notInt
1647 fii2d payload, tag, ft0
1648 bdnequn ft0, ft0, .opPutByValSlow
1650 stored ft0, [base, index, 8]
1653 .opPutByValNotDouble:
1654 bineq t2, ContiguousShape, .opPutByValNotContiguous
# ContiguousShape: any JSValue can be stored as tag+payload.
1656 macro (operand, scratch, base, index)
1658 const payload = operand
1659 loadConstantOrVariable2Reg(operand, tag, payload)
1660 storei tag, TagOffset[base, index, 8]
1661 storei payload, PayloadOffset[base, index, 8]
1664 .opPutByValNotContiguous:
1665 bineq t2, ArrayStorageShape, .opPutByValSlow
# ArrayStorage: bounds-check vectorLength; an empty (hole) slot needs the
# bookkeeping at .opPutByValArrayStorageEmpty before storing.
1666 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1667 bieq ArrayStorage::m_vector + TagOffset[t0, t3, 8], EmptyValueTag, .opPutByValArrayStorageEmpty
1668 .opPutByValArrayStorageStoreResult:
1670 loadConstantOrVariable2Reg(t2, t1, t2)
1671 storei t1, ArrayStorage::m_vector + TagOffset[t0, t3, 8]
1672 storei t2, ArrayStorage::m_vector + PayloadOffset[t0, t3, 8]
1675 .opPutByValArrayStorageEmpty:
# Filling a hole: note it in the profile, bump the in-vector value count,
# and extend publicLength if we wrote at/after the current length.
1677 storeb 1, ArrayProfile::m_mayStoreToHole[t1]
1678 addi 1, ArrayStorage::m_numValuesInVector[t0]
1679 bib t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .opPutByValArrayStorageStoreResult
1681 storei t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1682 jmp .opPutByValArrayStorageStoreResult
1684 .opPutByValOutOfBounds:
# Record OOB in the ArrayProfile before slow-pathing.
1685 loadpFromInstruction(4, t0)
1686 storeb 1, ArrayProfile::m_outOfBounds[t0]
1688 callSlowPath(slowPath)
# Entry points: normal vs. direct (defineOwnProperty-style) put_by_val.
1692 _llint_op_put_by_val:
1693 putByVal(_llint_slow_path_put_by_val)
1695 _llint_op_put_by_val_direct:
1696 putByVal(_llint_slow_path_put_by_val_direct)
# Unconditional branch body (presumably op_jmp — its label is elided in this
# excerpt): branch by the offset in instruction slot 1.
1700 dispatchBranch(4[PC])
# jumpTrueOrFalse: shared body for jtrue/jfalse. conditionOp tests the
# boolean payload; non-boolean operands go to the slow path.
1703 macro jumpTrueOrFalse(conditionOp, slow)
1705 loadConstantOrVariablePayload(t1, BooleanTag, t0, .slow)
1706 conditionOp(t0, .target)
# .target presumably follows; the branch offset is in instruction slot 2.
1710 dispatchBranch(8[PC])
# equalNull: shared body for jeq_null/jneq_null. Cells are handled by
# cellHandler (which gets the structure and the cell's type flags);
# non-cells by immediateHandler (which gets the tag).
1718 macro equalNull(cellHandler, immediateHandler)
1720 assertNotConstant(t0)
1721 loadi TagOffset[cfr, t0, 8], t1
1722 loadi PayloadOffset[cfr, t0, 8], t0
1723 bineq t1, CellTag, .immediate
1724 loadp JSCell::m_structureID[t0], t2
1725 cellHandler(t2, JSCell::m_flags[t0], .target)
1729 dispatchBranch(8[PC])
1733 immediateHandler(t1, .target)
# jeq_null cell handler (the equalNull(...) invocation line is elided in
# this excerpt): a cell equals null only if it masquerades as undefined
# within the current global object.
1740 macro (structure, value, target)
1741 btbz value, MasqueradesAsUndefined, .opJeqNullNotMasqueradesAsUndefined
1742 loadp CodeBlock[cfr], t0
1743 loadp CodeBlock::m_globalObject[t0], t0
1744 bpeq Structure::m_globalObject[structure], t0, target
1745 .opJeqNullNotMasqueradesAsUndefined:
# Immediate handler: null/undefined tags compare equal to null.
# NOTE(review): presumably the elided lines also test UndefinedTag.
1747 macro (value, target) bieq value, NullTag, target end)
# jneq_null: the mirror image — branch when the value is NOT null-like.
1750 _llint_op_jneq_null:
1753 macro (structure, value, target)
1754 btbz value, MasqueradesAsUndefined, target
1755 loadp CodeBlock[cfr], t0
1756 loadp CodeBlock::m_globalObject[t0], t0
1757 bpneq Structure::m_globalObject[structure], t0, target
1759 macro (value, target) bineq value, NullTag, target end)
# jneq_ptr body (its label is elided in this excerpt): compare a register
# against one of the global object's special pointers; branch when unequal.
1766 loadp CodeBlock[cfr], t2
1767 loadp CodeBlock::m_globalObject[t2], t2
1768 bineq TagOffset[cfr, t0, 8], CellTag, .opJneqPtrBranch
1769 loadp JSGlobalObject::m_specialPointers[t2, t1, 4], t1
1770 bpeq PayloadOffset[cfr, t0, 8], t1, .opJneqPtrFallThrough
1772 dispatchBranch(12[PC])
1773 .opJneqPtrFallThrough:
# compare: shared body for the jless/jlesseq/jgreater/... branch opcodes.
# Fast-paths int32 x int32 via integerCompare; mixed int/double and
# double/double cases convert (conversions elided in this excerpt) and use
# doubleCompare; anything non-numeric goes through slowPath.
1777 macro compare(integerCompare, doubleCompare, slowPath)
1780 loadConstantOrVariable(t2, t0, t1)
1781 loadConstantOrVariable2Reg(t3, t2, t3)
1782 bineq t0, Int32Tag, .op1NotInt
1783 bineq t2, Int32Tag, .op2NotInt
1784 integerCompare(t1, t3, .jumpTarget)
# op1 is not an int32: a tag above LowestTag is not a double either.
1788 bia t0, LowestTag, .slow
1789 bib t2, LowestTag, .op1NotIntOp2Double
1790 bineq t2, Int32Tag, .slow
1793 .op1NotIntOp2Double:
1797 doubleCompare(ft0, ft1, .jumpTarget)
1802 bia t2, LowestTag, .slow
1804 doubleCompare(ft0, ft1, .jumpTarget)
# Taken branch: offset lives in instruction slot 3.
1808 dispatchBranch(12[PC])
1811 callSlowPath(slowPath)
# switch_imm: jump-table switch on an int32 scrutinee. Index the
# SimpleJumpTable by (value - min); zero offsets and out-of-range values
# fall through; doubles go to the slow path.
1816 _llint_op_switch_imm:
1820 loadConstantOrVariable(t2, t1, t0)
1821 loadp CodeBlock[cfr], t2
1822 loadp CodeBlock::m_rareData[t2], t2
1823 muli sizeof SimpleJumpTable, t3 # FIXME: would be nice to peephole this!
1824 loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1826 bineq t1, Int32Tag, .opSwitchImmNotInt
1827 subi SimpleJumpTable::min[t2], t0
# Unsigned compare doubles as a negative-index check.
1828 biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
1829 loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
1830 loadi [t3, t0, 4], t1
# A zero branch offset means "no case here".
1831 btiz t1, .opSwitchImmFallThrough
1832 dispatchBranchWithOffset(t1)
1835 bib t1, LowestTag, .opSwitchImmSlow # Go to slow path if it's a double.
1836 .opSwitchImmFallThrough:
1837 dispatchBranch(8[PC])
1840 callSlowPath(_llint_slow_path_switch_imm)
# switch_char: jump-table switch on a single-character string. Anything
# that isn't a 1-character resolved string falls through; ropes go to the
# slow path (.opSwitchOnRope — its body is elided in this excerpt).
1844 _llint_op_switch_char:
1848 loadConstantOrVariable(t2, t1, t0)
1849 loadp CodeBlock[cfr], t2
1850 loadp CodeBlock::m_rareData[t2], t2
1851 muli sizeof SimpleJumpTable, t3
1852 loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1854 bineq t1, CellTag, .opSwitchCharFallThrough
1855 bbneq JSCell::m_type[t0], StringType, .opSwitchCharFallThrough
1856 bineq JSString::m_length[t0], 1, .opSwitchCharFallThrough
# A null StringImpl means the string is an unresolved rope.
1857 loadp JSString::m_value[t0], t0
1858 btpz t0, .opSwitchOnRope
# Pick the 8-bit or 16-bit character load based on the buffer flag
# (the 16-bit load between these lines is elided in this excerpt).
1859 loadp StringImpl::m_data8[t0], t1
1860 btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
1862 jmp .opSwitchCharReady
# Same table lookup as switch_imm: index by (char - min).
1866 subi SimpleJumpTable::min[t2], t0
1867 biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
1868 loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
1869 loadi [t2, t0, 4], t1
1870 btiz t1, .opSwitchCharFallThrough
1871 dispatchBranchWithOffset(t1)
1873 .opSwitchCharFallThrough:
1874 dispatchBranch(8[PC])
1877 callSlowPath(_llint_slow_path_switch_char)
# arrayProfileForCall: record the structure of the 'this' argument of an
# imminent call into the ArrayProfile slot of the call instruction, but
# only when 'this' is a cell.
1881 macro arrayProfileForCall()
1884 bineq ThisArgumentOffset + TagOffset[cfr, t3, 8], CellTag, .done
1885 loadi ThisArgumentOffset + PayloadOffset[cfr, t3, 8], t0
1886 loadp JSCell::m_structureID[t0], t0
1887 loadpFromInstruction(CallOpCodeSize - 2, t1)
1888 storep t0, ArrayProfile::m_lastSeenStructureID[t1]
# doCall: fast path for op_call/op_construct-style opcodes. Compare the
# callee against the link-info cache; on a hit, build the new call frame
# and jump to the cached machine code target.
# NOTE(review): several frame-setup instructions are elided in this excerpt
# (embedded line numbers jump, e.g. 1897 -> 1901), including the loads that
# establish t3 as the frame offset and t1 as the code target.
1892 macro doCall(slowPath)
1895 loadp LLIntCallLinkInfo::callee[t1], t2
1896 loadConstantOrVariablePayload(t0, CellTag, t3, .opCallSlow)
1897 bineq t3, t2, .opCallSlow
1901 addp cfr, t3 # t3 contains the new value of cfr
1902 storei t2, Callee + PayloadOffset[t3]
# Save the current PC into the caller frame so the callee can return.
1904 storei PC, ArgumentCount + TagOffset[cfr]
1905 storei t2, ArgumentCount + PayloadOffset[t3]
1906 storei CellTag, Callee + TagOffset[t3]
1907 addp CallerFrameAndPCSize, t3
1908 callTargetFunction(t1, t3)
1911 slowPathForCall(slowPath)
# Return-opcode body fragment (its label is elided in this excerpt):
# possibly tier up, then load the return value operand.
1917 checkSwitchToJITForEpilogue()
1919 loadConstantOrVariable(t2, t1, t0)
# to_primitive: values that are not objects are already primitive and are
# copied through; objects take the slow case (which may call valueOf etc.).
1923 _llint_op_to_primitive:
1927 loadConstantOrVariable(t2, t1, t0)
1928 bineq t1, CellTag, .opToPrimitiveIsImm
1929 bbaeq JSCell::m_type[t0], ObjectType, .opToPrimitiveSlowCase
1930 .opToPrimitiveIsImm:
1931 storei t1, TagOffset[cfr, t3, 8]
1932 storei t0, PayloadOffset[cfr, t3, 8]
1935 .opToPrimitiveSlowCase:
1936 callSlowPath(_slow_path_to_primitive)
1941 # This is where we end up from the JIT's throw trampoline (because the
1942 # machine code return address will be set to _llint_op_catch), and from
1943 # the interpreter's throw trampoline (see _llint_throw_trampoline).
1944 # The throwing code must have known that we were throwing to the interpreter,
1945 # and have set VM::targetInterpreterPCForThrow.
# Recover the VM pointer from the callee cell (cells live in MarkedBlocks,
# so masking the pointer finds the block header), then restore cfr/PC for
# the frame that has the catch handler.
1946 loadp Callee + PayloadOffset[cfr], t3
1947 andp MarkedBlockMask, t3
1948 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
1949 loadp VM::callFrameForThrow[t3], cfr
1950 loadp VM::vmEntryFrameForThrow[t3], t0
1951 storep t0, VM::topVMEntryFrame[t3]
1952 restoreStackPointerAfterCall()
1954 loadi VM::targetInterpreterPCForThrow[t3], PC
# Take the Exception object out of the VM and clear the pending-exception
# slot. NOTE(review): operand loads between the visible lines are elided in
# this excerpt; t2 presumably holds the catch destination register indices.
1955 loadi VM::m_exception[t3], t0
1956 storei 0, VM::m_exception[t3]
# Store the Exception cell itself into one destination register...
1958 storei t0, PayloadOffset[cfr, t2, 8]
1959 storei CellTag, TagOffset[cfr, t2, 8]
# ...and the thrown JSValue (Exception::m_value) into another.
1961 loadi Exception::m_value + TagOffset[t0], t1
1962 loadi Exception::m_value + PayloadOffset[t0], t0
1964 storei t0, PayloadOffset[cfr, t2, 8]
1965 storei t1, TagOffset[cfr, t2, 8]
1967 traceExecution() # This needs to be here because we don't want to clobber t0, t1, t2, t3 above.
# op_end body fragment (its label is elided in this excerpt): possibly tier
# up, then fetch the final value from its (non-constant) register.
1972 checkSwitchToJITForEpilogue()
1974 assertNotConstant(t0)
1975 loadi TagOffset[cfr, t0, 8], t1
1976 loadi PayloadOffset[cfr, t0, 8], t0
# Throw trampolines.
1980 _llint_throw_from_slow_path_trampoline:
1981 callSlowPath(_llint_slow_path_handle_exception)
1983 # When throwing from the interpreter (i.e. throwing from LLIntSlowPaths), so
1984 # the throw target is not necessarily interpreted code, we come to here.
1985 # This essentially emulates the JIT's throwing protocol.
# Recover the VM from the callee cell (MarkedBlock mask trick) and jump to
# the machine-code throw target the slow path installed.
1986 loadp Callee[cfr], t1
1987 andp MarkedBlockMask, t1
1988 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
1989 jmp VM::targetMachinePCForThrow[t1]
# Variant used while a call is in flight: first restore the return address.
1992 _llint_throw_during_call_trampoline:
1993 preserveReturnAddressAfterCall(t2)
1994 jmp _llint_throw_from_slow_path_trampoline
# nativeCallTrampoline: call a host (C/C++) function from the LLInt.
# executableOffsetToFunction is the offset of the native function pointer
# within the callee's executable. Per-architecture paths differ only in
# calling convention / stack alignment; all converge on recovering the VM
# afterwards and checking for a pending exception.
# NOTE(review): the elsif/else structure is partly elided in this excerpt;
# the first path is presumably X86-family given the "t2 = ecx" comment.
1997 macro nativeCallTrampoline(executableOffsetToFunction)
2000 storep 0, CodeBlock[cfr]
2001 loadi Callee + PayloadOffset[cfr], t1
2002 // Callee is still in t1 for code below
2004 subp 8, sp # align stack pointer
# VM recovery from the callee cell via the MarkedBlock mask.
2005 andp MarkedBlockMask, t1
2006 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t3
2007 storep cfr, VM::topCallFrame[t3]
2008 move cfr, t2 # t2 = ecx
2010 loadi Callee + PayloadOffset[cfr], t1
2011 loadp JSFunction::m_executable[t1], t1
2012 checkStackPointerAlignment(t3, 0xdead0001)
2013 call executableOffsetToFunction[t1]
# Re-derive the VM after the native call (t3 may have been clobbered).
2014 loadp Callee + PayloadOffset[cfr], t3
2015 andp MarkedBlockMask, t3
2016 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
2018 elsif ARM or ARMv7 or ARMv7_TRADITIONAL or C_LOOP or MIPS or SH4
2019 subp 8, sp # align stack pointer
2020 # t1 already contains the Callee.
2021 andp MarkedBlockMask, t1
2022 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
2023 storep cfr, VM::topCallFrame[t1]
2029 loadi Callee + PayloadOffset[cfr], t1
2030 loadp JSFunction::m_executable[t1], t1
2031 checkStackPointerAlignment(t3, 0xdead0001)
# C_LOOP uses a dedicated pseudo-instruction to invoke native code.
2033 cloopCallNative executableOffsetToFunction[t1]
2035 call executableOffsetToFunction[t1]
2037 loadp Callee + PayloadOffset[cfr], t3
2038 andp MarkedBlockMask, t3
2039 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
# Common tail: pending exception -> throw trampoline; else return normally
# (the normal-return instructions are elided in this excerpt).
2046 btinz VM::m_exception[t3], .handleException
2050 storep cfr, VM::topCallFrame[t3]
2051 restoreStackPointerAfterCall()
2052 jmp _llint_throw_from_slow_path_trampoline
# getGlobalObject: store the current CodeBlock's global object as a cell
# JSValue into the virtual register named by instruction operand `dst`.
2056 macro getGlobalObject(dst)
2057 loadp CodeBlock[cfr], t0
2058 loadp CodeBlock::m_globalObject[t0], t0
2059 loadisFromInstruction(dst, t1)
2060 storei CellTag, TagOffset[cfr, t1, 8]
2061 storei t0, PayloadOffset[cfr, t1, 8]
# varInjectionCheck: bail to slowPath if the global object's var-injection
# watchpoint has fired (meaning eval/with may have injected variables and
# the cached scope resolution is no longer trustworthy).
2064 macro varInjectionCheck(slowPath)
2065 loadp CodeBlock[cfr], t0
2066 loadp CodeBlock::m_globalObject[t0], t0
2067 loadp JSGlobalObject::m_varInjectionWatchpoint[t0], t0
2068 bbeq WatchpointSet::m_state[t0], IsInvalidated, slowPath
# resolveScope: walk `depth` (instruction operand 5) links up the scope
# chain starting from the scope in operand 2, then store the resulting
# scope cell into the destination register (operand 1).
# NOTE(review): the .resolveScopeLoop label and the decrement of t2 are
# elided in this excerpt (embedded line numbers jump).
2071 macro resolveScope()
2072 loadp CodeBlock[cfr], t0
2073 loadisFromInstruction(5, t2)
2075 loadisFromInstruction(2, t0)
2076 loadp PayloadOffset[cfr, t0, 8], t0
2077 btiz t2, .resolveScopeLoopEnd
2080 loadp JSScope::m_next[t0], t0
2082 btinz t2, .resolveScopeLoop
2084 .resolveScopeLoopEnd:
2085 loadisFromInstruction(1, t1)
2086 storei CellTag, TagOffset[cfr, t1, 8]
2087 storei t0, PayloadOffset[cfr, t1, 8]
# resolve_scope: dispatch on the cached ResolveType (instruction operand 4).
# Global cases store the global object; closure cases walk the scope chain;
# *WithVarInjectionChecks variants first verify the injection watchpoint.
# Unknown/dynamic types go to the C++ slow path.
# NOTE(review): the per-case bodies (getGlobalObject()/resolveScope() calls
# and dispatch()es) are mostly elided in this excerpt — only the type
# comparisons and varInjectionCheck()s are visible.
2091 _llint_op_resolve_scope:
2093 loadisFromInstruction(4, t0)
2096 bineq t0, GlobalProperty, .rGlobalVar
2101 bineq t0, GlobalVar, .rClosureVar
2106 bineq t0, ClosureVar, .rGlobalPropertyWithVarInjectionChecks
2110 .rGlobalPropertyWithVarInjectionChecks:
2111 bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
2112 varInjectionCheck(.rDynamic)
2116 .rGlobalVarWithVarInjectionChecks:
2117 bineq t0, GlobalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
2118 varInjectionCheck(.rDynamic)
2122 .rClosureVarWithVarInjectionChecks:
2123 bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
2124 varInjectionCheck(.rDynamic)
2129 callSlowPath(_llint_slow_path_resolve_scope)
# loadWithStructureCheck: load the object named by `operand` into t0 and
# verify its structure against the one cached in instruction slot 5;
# mismatch -> slowPath.
2133 macro loadWithStructureCheck(operand, slowPath)
2134 loadisFromInstruction(operand, t0)
2135 loadp PayloadOffset[cfr, t0, 8], t0
2136 loadpFromInstruction(5, t1)
2137 bpneq JSCell::m_structureID[t0], t1, slowPath
# getProperty body fragment (its macro header is elided in this excerpt):
# load the property at the cached offset (operand 6) from the object in t0
# and store it, with value profiling, into the destination (operand 1).
2141 loadisFromInstruction(6, t3)
2142 loadPropertyAtVariableOffset(t3, t0, t1, t2)
2143 valueProfile(t1, t2, 28, t0)
2144 loadisFromInstruction(1, t0)
2145 storei t1, TagOffset[cfr, t0, 8]
2146 storei t2, PayloadOffset[cfr, t0, 8]
# getGlobalVar: read a global variable through the pointer cached in
# instruction slot 6 (the variable's fixed address), profile it, and store
# it into the destination register (operand 1).
2149 macro getGlobalVar()
2150 loadpFromInstruction(6, t0)
2151 loadp TagOffset[t0], t1
2152 loadp PayloadOffset[t0], t2
2153 valueProfile(t1, t2, 28, t0)
2154 loadisFromInstruction(1, t0)
2155 storei t1, TagOffset[cfr, t0, 8]
2156 storei t2, PayloadOffset[cfr, t0, 8]
# getClosureVar: read slot `operand 6` of the environment record in t0
# (t0 is presumably the resolved scope, loaded by the caller — its load is
# not visible here), profile, and store into the destination (operand 1).
2159 macro getClosureVar()
2160 loadisFromInstruction(6, t3)
2161 loadp JSEnvironmentRecord_variables + TagOffset[t0, t3, 8], t1
2162 loadp JSEnvironmentRecord_variables + PayloadOffset[t0, t3, 8], t2
2163 valueProfile(t1, t2, 28, t0)
2164 loadisFromInstruction(1, t0)
2165 storei t1, TagOffset[cfr, t0, 8]
2166 storei t2, PayloadOffset[cfr, t0, 8]
# get_from_scope: dispatch on the cached ResolveType (low bits of operand
# 4). GlobalProperty uses a structure-checked cached-offset load;
# GlobalVar uses the cached variable address; ClosureVar indexes the
# environment record. *WithVarInjectionChecks variants verify the
# watchpoint first. Anything else -> .gDynamic slow path.
# NOTE(review): the getProperty()/getGlobalVar()/getClosureVar() calls and
# dispatch()es for several cases are elided in this excerpt.
2169 _llint_op_get_from_scope:
2171 loadisFromInstruction(4, t0)
2172 andi ResolveModeMask, t0
2175 bineq t0, GlobalProperty, .gGlobalVar
2176 loadWithStructureCheck(2, .gDynamic)
2181 bineq t0, GlobalVar, .gClosureVar
2186 bineq t0, ClosureVar, .gGlobalPropertyWithVarInjectionChecks
2187 loadVariable(2, t2, t1, t0)
2191 .gGlobalPropertyWithVarInjectionChecks:
2192 bineq t0, GlobalPropertyWithVarInjectionChecks, .gGlobalVarWithVarInjectionChecks
2193 loadWithStructureCheck(2, .gDynamic)
2197 .gGlobalVarWithVarInjectionChecks:
2198 bineq t0, GlobalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
2199 varInjectionCheck(.gDynamic)
2203 .gClosureVarWithVarInjectionChecks:
2204 bineq t0, ClosureVarWithVarInjectionChecks, .gDynamic
2205 varInjectionCheck(.gDynamic)
2206 loadVariable(2, t2, t1, t0)
2211 callSlowPath(_llint_slow_path_get_from_scope)
# putProperty body fragment (its macro header is elided in this excerpt):
# store the value operand (3) at the cached offset (operand 6) of the
# structure-checked object in t0.
2216 loadisFromInstruction(3, t1)
2217 loadConstantOrVariable(t1, t2, t3)
2218 loadisFromInstruction(6, t1)
2219 storePropertyAtVariableOffset(t1, t0, t2, t3)
# putGlobalVar: write the value operand (3) to the cached variable address
# (slot 6), but first fire the variable's WatchpointSet (slot 5) so code
# specialized on the old value deoptimizes; notifyWrite bails to .pDynamic
# if it can't be done inline.
2222 macro putGlobalVar()
2223 loadisFromInstruction(3, t0)
2224 loadConstantOrVariable(t0, t1, t2)
2225 loadpFromInstruction(5, t3)
2226 notifyWrite(t3, .pDynamic)
2227 loadpFromInstruction(6, t0)
2228 storei t1, TagOffset[t0]
2229 storei t2, PayloadOffset[t0]
# putClosureVar: write the value operand (3) into slot `operand 6` of the
# environment record in t0 (t0 presumably set up by the caller).
2232 macro putClosureVar()
2233 loadisFromInstruction(3, t1)
2234 loadConstantOrVariable(t1, t2, t3)
2235 loadisFromInstruction(6, t1)
2236 storei t2, JSEnvironmentRecord_variables + TagOffset[t0, t1, 8]
2237 storei t3, JSEnvironmentRecord_variables + PayloadOffset[t0, t1, 8]
# putLocalClosureVar: like putClosureVar, but the WatchpointSet pointer in
# slot 5 may be null (no watchpoint allocated yet), so only notify when
# one exists.
2240 macro putLocalClosureVar()
2241 loadisFromInstruction(3, t1)
2242 loadConstantOrVariable(t1, t2, t3)
2243 loadpFromInstruction(5, t4)
2244 btpz t4, .noVariableWatchpointSet
2245 notifyWrite(t4, .pDynamic)
2246 .noVariableWatchpointSet:
2247 loadisFromInstruction(6, t1)
2248 storei t2, JSEnvironmentRecord_variables + TagOffset[t0, t1, 8]
2249 storei t3, JSEnvironmentRecord_variables + PayloadOffset[t0, t1, 8]
# put_to_scope: dispatch on the cached ResolveType (low bits of operand 4).
# Each case applies the appropriate write barrier, then performs the
# matching store (putLocalClosureVar / putProperty / putGlobalVar /
# putClosureVar); *WithVarInjectionChecks variants verify the watchpoint.
# NOTE(review): several per-case store/dispatch lines are elided in this
# excerpt (embedded line numbers jump).
2253 _llint_op_put_to_scope:
2255 loadisFromInstruction(4, t0)
2256 andi ResolveModeMask, t0
2259 bineq t0, LocalClosureVar, .pGlobalProperty
2260 writeBarrierOnOperands(1, 3)
2261 loadVariable(1, t2, t1, t0)
2262 putLocalClosureVar()
2266 bineq t0, GlobalProperty, .pGlobalVar
2267 writeBarrierOnOperands(1, 3)
2268 loadWithStructureCheck(1, .pDynamic)
2273 bineq t0, GlobalVar, .pClosureVar
# Global-var stores barrier the global object rather than an operand cell.
2274 writeBarrierOnGlobalObject(3)
2279 bineq t0, ClosureVar, .pGlobalPropertyWithVarInjectionChecks
2280 writeBarrierOnOperands(1, 3)
2281 loadVariable(1, t2, t1, t0)
2285 .pGlobalPropertyWithVarInjectionChecks:
2286 bineq t0, GlobalPropertyWithVarInjectionChecks, .pGlobalVarWithVarInjectionChecks
2287 writeBarrierOnOperands(1, 3)
2288 loadWithStructureCheck(1, .pDynamic)
2292 .pGlobalVarWithVarInjectionChecks:
2293 bineq t0, GlobalVarWithVarInjectionChecks, .pClosureVarWithVarInjectionChecks
2294 writeBarrierOnGlobalObject(3)
2295 varInjectionCheck(.pDynamic)
2299 .pClosureVarWithVarInjectionChecks:
2300 bineq t0, ClosureVarWithVarInjectionChecks, .pDynamic
2301 writeBarrierOnOperands(1, 3)
2302 varInjectionCheck(.pDynamic)
2303 loadVariable(1, t2, t1, t0)
2308 callSlowPath(_llint_slow_path_put_to_scope)
# get_from_arguments: read slot `operand 3` (its load is elided in this
# excerpt — t1 presumably holds it) from the DirectArguments object named
# by operand 2, profile, and store into the destination (operand 1).
2312 _llint_op_get_from_arguments:
2314 loadisFromInstruction(2, t0)
2315 loadi PayloadOffset[cfr, t0, 8], t0
2317 loadi DirectArguments_storage + TagOffset[t0, t1, 8], t2
2318 loadi DirectArguments_storage + PayloadOffset[t0, t1, 8], t3
2319 loadisFromInstruction(1, t1)
2320 valueProfile(t2, t3, 16, t0)
2321 storei t2, TagOffset[cfr, t1, 8]
2322 storei t3, PayloadOffset[cfr, t1, 8]
# put_to_arguments: write the value operand (3) into a slot of the
# DirectArguments object named by operand 1, with a write barrier.
# NOTE(review): the reload of the slot index into t1 before the stores is
# elided in this excerpt (t1 holds the value operand index at line 2332).
2326 _llint_op_put_to_arguments:
2328 writeBarrierOnOperands(1, 3)
2329 loadisFromInstruction(1, t0)
2330 loadi PayloadOffset[cfr, t0, 8], t0
2331 loadisFromInstruction(3, t1)
2332 loadConstantOrVariable(t1, t2, t3)
2334 storei t2, DirectArguments_storage + TagOffset[t0, t1, 8]
2335 storei t3, DirectArguments_storage + PayloadOffset[t0, t1, 8]
# profile_type: append (value, TypeLocation, structureID-or-0) to the VM's
# TypeProfilerLog ring buffer; when the buffer fills, call the slow path to
# flush it. NOTE(review): this block runs past the end of the excerpt —
# the trailing dispatch is not visible.
2339 _llint_op_profile_type:
2341 loadp CodeBlock[cfr], t1
2342 loadp CodeBlock::m_vm[t1], t1
2343 # t1 is holding the pointer to the typeProfilerLog.
2344 loadp VM::m_typeProfilerLog[t1], t1
2346 # t0 is holding the payload, t4 is holding the tag.
2347 loadisFromInstruction(1, t2)
2348 loadConstantOrVariable(t2, t4, t0)
2350 # t2 is holding the pointer to the current log entry.
2351 loadp TypeProfilerLog::m_currentLogEntryPtr[t1], t2
2353 # Store the JSValue onto the log entry.
2354 storei t4, TypeProfilerLog::LogEntry::value + TagOffset[t2]
2355 storei t0, TypeProfilerLog::LogEntry::value + PayloadOffset[t2]
2357 # Store the TypeLocation onto the log entry.
2358 loadpFromInstruction(2, t3)
2359 storep t3, TypeProfilerLog::LogEntry::location[t2]
# Record the structure for cells; 0 for non-cells.
2361 bieq t4, CellTag, .opProfileTypeIsCell
2362 storei 0, TypeProfilerLog::LogEntry::structureID[t2]
2363 jmp .opProfileTypeSkipIsCell
2364 .opProfileTypeIsCell:
2365 loadi JSCell::m_structureID[t0], t3
2366 storei t3, TypeProfilerLog::LogEntry::structureID[t2]
2367 .opProfileTypeSkipIsCell:
2369 # Increment the current log entry.
2370 addp sizeof TypeProfilerLog::LogEntry, t2
2371 storep t2, TypeProfilerLog::m_currentLogEntryPtr[t1]
# Flush the log via the slow path when the buffer is full.
2373 loadp TypeProfilerLog::m_logEndPtr[t1], t1
2374 bpneq t2, t1, .opProfileTypeDone
2375 callSlowPath(_slow_path_profile_type_clear_log)