1 # Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions
6 # 1. Redistributions of source code must retain the above copyright
7 # notice, this list of conditions and the following disclaimer.
8 # 2. Redistributions in binary form must reproduce the above copyright
9 # notice, this list of conditions and the following disclaimer in the
10 # documentation and/or other materials provided with the distribution.
12 # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
13 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
14 # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
15 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
16 # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
17 # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
18 # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
19 # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
20 # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
21 # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
22 # THE POSSIBILITY OF SUCH DAMAGE.
25 # Crash course on the language that this is written in (which I just call
26 # "assembly" even though it's more than that):
28 # - Mostly gas-style operand ordering. The last operand tends to be the
29 # destination. So "a := b" is written as "mov b, a". But unlike gas,
30 # comparisons are in-order, so "if (a < b)" is written as
33 # - "b" = byte, "h" = 16-bit word, "i" = 32-bit word, "p" = pointer.
34 # Currently this is just 32-bit so "i" and "p" are interchangeable
35 # except when an op supports one but not the other.
37 # - In general, valid operands for macro invocations and instructions are
38 # registers (eg "t0"), addresses (eg "4[t0]"), base-index addresses
39 # (eg "7[t0, t1, 2]"), absolute addresses (eg "0xa0000000[]"), or labels
40 # (eg "_foo" or ".foo"). Macro invocations can also take anonymous
41 # macros as operands. Instructions cannot take anonymous macros.
43 # - Labels must have names that begin with either "_" or ".". A "." label
44 # is local and gets renamed before code gen to minimize namespace
45 # pollution. A "_" label is an extern symbol (i.e. ".globl"). The "_"
46 # may or may not be removed during code gen depending on whether the asm
47 # conventions for C name mangling on the target platform mandate a "_"
50 # - A "macro" is a lambda expression, which may be either anonymous or
51 # named. But this has caveats. "macro" can take zero or more arguments,
52 # which may be macros or any valid operands, but it can only return
53 # code. But you can do Turing-complete things via continuation passing
54 # style: "macro foo (a, b) b(a) end foo(foo, foo)". Actually, don't do
55 # that, since you'll just crash the assembler.
57 # - An "if" is a conditional on settings. Any identifier supplied in the
58 # predicate of an "if" is assumed to be a #define that is available
59 # during code gen. So you can't use "if" for computation in a macro, but
60 # you can use it to select different pieces of code for different
63 # - Arguments to macros follow lexical scoping rather than dynamic scoping.
64 # Const's also follow lexical scoping and may override (hide) arguments
65 # or other consts. All variables (arguments and constants) can be bound
66 # to operands. Additionally, arguments (but not constants) can be bound
70 # Below we have a bunch of constant declarations. Each constant must have
71 # a corresponding ASSERT() in LLIntData.cpp.
# NOTE(review): this is WebKit offlineasm (LLInt, 32/64 value representation).
# Many original lines are elided from this excerpt, so macro bodies below are
# partial; confirm against the full LowLevelInterpreter32_64.asm before editing.
# dispatch(advance): presumably advances PC by `advance` instruction slots and
# jumps to the next opcode handler — body elided here, TODO confirm.
74 macro dispatch(advance)
# dispatchBranchWithOffset(pcOffset): dispatch after adding a branch offset to
# PC — body elided in this excerpt.
79 macro dispatchBranchWithOffset(pcOffset)
# dispatchBranch(pcOffset): loads the branch offset (into t0, per the call
# below) and forwards to dispatchBranchWithOffset.
85 macro dispatchBranch(pcOffset)
87 dispatchBranchWithOffset(t0)
# dispatchAfterCall(): resumes interpretation after a JS call returns.
# Reloads PC from the frame (it was saved in ArgumentCount's tag slot),
# stores the call's result (tag in t1, payload in t0) back into the frame
# slot indexed by t2, profiles the value, and dispatches past the call opcode.
90 macro dispatchAfterCall()
91 loadi ArgumentCount + TagOffset[cfr], PC
93 storei t1, TagOffset[cfr, t2, 8]
94 storei t0, PayloadOffset[cfr, t2, 8]
95 valueProfile(t1, t0, 4 * (CallOpCodeSize - 1), t3)
96 dispatch(CallOpCodeSize)
# cCall2(function, arg1, arg2): call a C slow-path function with two
# arguments. Per-architecture bodies are elided in this excerpt; only the
# architecture guard and the C_LOOP path (cloopCallSlowPath) are visible.
99 macro cCall2(function, arg1, arg2)
100 if ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS
114 cloopCallSlowPath function, arg1, arg2
# cCall2Void: same as cCall2 but for functions whose return value is ignored;
# on C_LOOP it uses the Void variant, otherwise it forwards to cCall2.
120 macro cCall2Void(function, arg1, arg2)
122 cloopCallSlowPathVoid function, arg1, arg2
124 cCall2(function, arg1, arg2)
# cCall4: four-argument C call. The original author's caveat below is kept.
128 # This barely works. arg3 and arg4 should probably be immediates.
129 macro cCall4(function, arg1, arg2, arg3, arg4)
130 if ARM or ARMv7 or ARMv7_TRADITIONAL or MIPS
144 setargs arg1, arg2, arg3, arg4
# callSlowPath: convention — every LLInt slow path receives (cfr, PC).
153 macro callSlowPath(slowPath)
154 cCall2(slowPath, cfr, PC)
# doCallToJavaScript(makeCall): the C++ -> JS entry trampoline. It builds a
# VM entry sentinel frame, checks stack capacity, copies header slots and
# arguments from the ProtoCallFrame onto the stack, then invokes `makeCall`.
# The per-architecture `const` register assignments below select which
# registers hold vm/entry/protoCallFrame/previousCFR/previousPC/temps; several
# branches and labels are elided from this excerpt — treat the control flow as
# partial and confirm against the full source.
158 macro doCallToJavaScript(makeCall)
162 const protoCallFrame = t5
164 const previousCFR = t0
165 const previousPC = t1
166 const temp1 = t0 # Same as previousCFR
167 const temp2 = t1 # Same as previousPC
169 const temp4 = t3 # same as vm
170 elsif ARM or ARMv7 or ARMv7_TRADITIONAL or C_LOOP
173 const protoCallFrame = a2
175 const previousCFR = t3
176 const previousPC = lr
177 const temp1 = t3 # Same as previousCFR
180 const temp4 = t4 # Same as temp2
183 const vmTopCallFrame = a1
184 const protoCallFrame = a2
185 const topOfStack = a3
187 const previousCFR = t2
188 const previousPC = lr
196 const protoCallFrame = a2
198 const previousCFR = t3
199 const previousPC = lr
200 const temp1 = t3 # Same as previousCFR
206 callToJavaScriptPrologue()
# Reload vm/entry from the C stack (offsets differ per ABI; the temp3-indexed
# forms appear to be one platform's variant — elided guards decide which).
212 loadp 40[sp, temp3], vm
213 loadp 36[sp, temp3], entry
215 move cfr, previousCFR
218 checkStackPointerAlignment(temp2, 0xbad0dc01)
220 # The stack reserved zone ensures that we have adequate space for the
221 # VMEntrySentinelFrame. Proceed with allocating and initializing the
# Build the sentinel frame: ArgumentCount = 0, Callee = vm, ScopeChain =
# previous topCallFrame, CodeBlock = 1 (the sentinel marker tested on exit).
224 subp CallFrameHeaderSlots * 8, cfr
225 storep 0, ArgumentCount[cfr]
226 storep vm, Callee[cfr]
227 loadp VM::topCallFrame[vm], temp2
228 storep temp2, ScopeChain[cfr]
229 storep 1, CodeBlock[cfr]
231 loadp 28[sp], previousPC
232 loadp 24[sp], previousCFR
234 loadp 32[sp, temp3], previousPC
235 loadp 28[sp, temp3], previousCFR
237 storep previousPC, ReturnPC[cfr]
238 storep previousCFR, CallerFrame[cfr]
241 loadp 40[sp], protoCallFrame
243 loadp 44[sp, temp3], protoCallFrame
# temp2 = total frame size in slots; temp1 = prospective new stack edge.
246 loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp2
247 addp CallFrameHeaderSlots, temp2, temp2
249 subp cfr, temp2, temp1
251 # Ensure that we have enough additional stack capacity for the incoming args,
252 # and the frame for the JS code we're executing. We need to do this check
253 # before we start copying the args from the protoCallFrame below.
254 bpaeq temp1, VM::m_jsStackLimit[vm], .stackHeightOK
266 cloopCallSlowPath _llint_stack_check_at_vm_entry, vm, temp1
267 bpeq t0, 0, .stackCheckFailed
277 cCall2(_llint_throw_stack_overflow_error, vm, protoCallFrame)
278 callToJavaScriptEpilogue()
# Copy call-frame header slots from the ProtoCallFrame (loop head elided;
# btinz below is the loop back-branch indexed by temp1).
287 loadi TagOffset[protoCallFrame, temp1, 8], temp3
288 storei temp3, TagOffset + CodeBlock[sp, temp1, 8]
289 loadi PayloadOffset[protoCallFrame, temp1, 8], temp3
290 storei temp3, PayloadOffset + CodeBlock[sp, temp1, 8]
291 btinz temp1, .copyHeaderLoop
# Pad missing arguments with undefined when argCount < paddedArgCount.
293 loadi PayloadOffset + ProtoCallFrame::argCountAndCodeOriginValue[protoCallFrame], temp2
295 loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp3
298 bieq temp2, temp3, .copyArgs
301 storei UndefinedTag, ThisArgumentOffset + 8 + TagOffset[sp, temp3, 8]
302 storei 0, ThisArgumentOffset + 8 + PayloadOffset[sp, temp3, 8]
303 bineq temp2, temp3, .fillExtraArgsLoop
# Copy the actual arguments (tag/payload pairs) from protoCallFrame->args.
306 loadp ProtoCallFrame::args[protoCallFrame], temp1
309 btiz temp2, .copyArgsDone
311 loadi TagOffset[temp1, temp2, 8], temp3
312 storei temp3, ThisArgumentOffset + 8 + TagOffset[sp, temp2, 8]
313 loadi PayloadOffset[temp1, temp2, 8], temp3
314 storei temp3, ThisArgumentOffset + 8 + PayloadOffset[sp, temp2, 8]
318 storep sp, VM::topCallFrame[vm]
320 makeCall(entry, temp1, temp2)
# On return: if CodeBlock == 1 we are already at the sentinel frame; otherwise
# pop to the caller frame first, then restore VM::topCallFrame and leave.
322 bpeq CodeBlock[cfr], 1, .calleeFramePopped
323 loadp CallerFrame[cfr], cfr
326 loadp Callee[cfr], temp3 # VM
327 loadp ScopeChain[cfr], temp4 # previous topCallFrame
328 storep temp4, VM::topCallFrame[temp3]
330 callToJavaScriptEpilogue()
# makeJavaScriptCall(entry, temp, unused): invoke a JS code entry point.
# The stack pointer is bumped past the CallerFrameAndPC slots around the call
# (the native `call` path between the two alignment checks is elided; only the
# C_LOOP cloopCallJSFunction path is visible here).
334 macro makeJavaScriptCall(entry, temp, unused)
335 addp CallerFrameAndPCSize, sp
336 checkStackPointerAlignment(t2, 0xbad0dc02)
338 cloopCallJSFunction entry
342 checkStackPointerAlignment(t2, 0xbad0dc03)
343 subp CallerFrameAndPCSize, sp
# makeHostFunctionCall(entry, temp1, temp2): invoke a native (host) function.
# Per-platform bodies are elided; the visible fragment shows the C_LOOP path
# (cloopCallNative) and an x86-style path that passes the callee frame pointer
# as arg0 (and in ecx for fastcall) — see the original comments kept below.
346 macro makeHostFunctionCall(entry, temp1, temp2)
351 storep lr, PtrSize[sp]
352 cloopCallNative temp1
355 # Put callee frame pointer on stack as arg0, also put it in ecx for "fastcall" targets
357 move temp2, 4[sp] # put 0 in ReturnPC
358 move cfr, [sp] # put caller frame pointer into callee frame since callee prologue can't
359 move sp, t2 # t2 is ecx
360 push temp2 # Push dummy arg1
364 addp CallerFrameAndPCSize, sp
370 subp CallerFrameAndPCSize, sp
# _handleUncaughtException: terminal handler when an exception escapes all JS
# frames. Recovers the VM pointer from the current callee (via the MarkedBlock
# header of its cell), restores cfr from VM::callFrameForThrow, pops to the
# sentinel frame if needed, restores VM::topCallFrame, and returns to C++.
375 _handleUncaughtException:
# t3 = callee cell -> owning MarkedBlock -> WeakSet -> VM.
376 loadp ScopeChain + PayloadOffset[cfr], t3
377 andp MarkedBlockMask, t3
378 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
379 loadp VM::callFrameForThrow[t3], cfr
381 # So far, we've unwound the stack to the frame just below the sentinel frame, except
382 # in the case of stack overflow in the first function called from callToJavaScript.
383 # Check if we need to pop to the sentinel frame and do the necessary clean up for
384 # returning to the caller C frame.
# CodeBlock == 1 marks the VM entry sentinel frame (set in doCallToJavaScript).
385 bpeq CodeBlock[cfr], 1, .handleUncaughtExceptionAlreadyIsSentinel
386 loadp CallerFrame + PayloadOffset[cfr], cfr
387 .handleUncaughtExceptionAlreadyIsSentinel:
389 loadp Callee + PayloadOffset[cfr], t3 # VM
390 loadp ScopeChain + PayloadOffset[cfr], t5 # previous topCallFrame
391 storep t5, VM::topCallFrame[t3]
393 callToJavaScriptEpilogue()
# doReturnFromHostFunction: epilogue helper for returning from a host call.
396 macro doReturnFromHostFunction(extraStackSpace)
397 functionEpilogue(extraStackSpace)
401 # Debugging operation if you'd like to print an operand in the instruction stream. fromWhere
402 # should be an immediate integer - any integer you like; use it to identify the place you're
403 # debugging from. operand should likewise be an immediate, and should identify the operand
404 # in the instruction stream you'd like to print out.
405 macro traceOperand(fromWhere, operand)
406 cCall4(_llint_trace_operand, cfr, PC, fromWhere, operand)
411 # Debugging operation if you'd like to print the value of an operand in the instruction
412 # stream. Same as traceOperand(), but assumes that the operand is a register, and prints its
414 macro traceValue(fromWhere, operand)
415 cCall4(_llint_trace_value, cfr, PC, fromWhere, operand)
420 # Call a slowPath for call opcodes.
# Saves PC into the ArgumentCount tag slot so dispatchAfterCall can reload it.
# NOTE(review): this uses `storep PC` while callWatchdogTimerHandler below
# uses `storei PC` for the same slot — likely harmless on 32-bit where i == p,
# but worth confirming/unifying in the full source.
421 macro callCallSlowPath(slowPath, action)
422 storep PC, ArgumentCount + TagOffset[cfr]
423 cCall2(slowPath, cfr, PC)
# callWatchdogTimerHandler: invoke the watchdog slow path; a non-zero result
# in t0 means "throw" and jumps to throwHandler, else PC is restored.
427 macro callWatchdogTimerHandler(throwHandler)
428 storei PC, ArgumentCount + TagOffset[cfr]
429 cCall2(_llint_slow_path_handle_watchdog_timer, cfr, PC)
430 btpnz t0, throwHandler
431 loadi ArgumentCount + TagOffset[cfr], PC
# checkSwitchToJITForLoop: loop OSR check; on the no-OSR path (visible here)
# PC is reloaded and interpretation continues. The OSR-taken path is elided.
434 macro checkSwitchToJITForLoop()
438 storei PC, ArgumentCount + TagOffset[cfr]
439 cCall2(_llint_loop_osr, cfr, PC)
444 loadi ArgumentCount + TagOffset[cfr], PC
# loadVariable: read instruction operand `operand` as a frame index, then load
# that virtual register's tag and payload from the call frame.
448 macro loadVariable(operand, index, tag, payload)
449 loadisFromInstruction(operand, index)
450 loadi TagOffset[cfr, index, 8], tag
451 loadi PayloadOffset[cfr, index, 8], payload
454 # Index, tag, and payload must be different registers. Index is not
# loadConstantOrVariable: like loadVariable but `index` may name a constant
# (index >= FirstConstantRegisterIndex), in which case the value comes from
# CodeBlock::m_constantRegisters instead of the frame.
456 macro loadConstantOrVariable(index, tag, payload)
457 bigteq index, FirstConstantRegisterIndex, .constant
458 loadi TagOffset[cfr, index, 8], tag
459 loadi PayloadOffset[cfr, index, 8], payload
462 loadp CodeBlock[cfr], payload
463 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
464 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
465 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
466 loadp TagOffset[payload, index, 8], tag
467 loadp PayloadOffset[payload, index, 8], payload
# Tag-only variant of the above.
471 macro loadConstantOrVariableTag(index, tag)
472 bigteq index, FirstConstantRegisterIndex, .constant
473 loadi TagOffset[cfr, index, 8], tag
476 loadp CodeBlock[cfr], tag
477 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
478 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
479 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
480 loadp TagOffset[tag, index, 8], tag
484 # Index and payload may be the same register. Index may be clobbered.
485 macro loadConstantOrVariable2Reg(index, tag, payload)
486 bigteq index, FirstConstantRegisterIndex, .constant
487 loadi TagOffset[cfr, index, 8], tag
488 loadi PayloadOffset[cfr, index, 8], payload
491 loadp CodeBlock[cfr], tag
492 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[tag], tag
493 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
494 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
# (the address computation into `tag` is elided in this excerpt)
497 loadp PayloadOffset[tag], payload
498 loadp TagOffset[tag], tag
# Generic form: `tagCheck` is an anonymous macro applied to the tag slot
# address, letting callers branch on the tag without a separate load.
502 macro loadConstantOrVariablePayloadTagCustom(index, tagCheck, payload)
503 bigteq index, FirstConstantRegisterIndex, .constant
504 tagCheck(TagOffset[cfr, index, 8])
505 loadi PayloadOffset[cfr, index, 8], payload
508 loadp CodeBlock[cfr], payload
509 loadp CodeBlock::m_constantRegisters + VectorBufferOffset[payload], payload
510 # There is a bit of evil here: if the index contains a value >= FirstConstantRegisterIndex,
511 # then value << 3 will be equal to (value - FirstConstantRegisterIndex) << 3.
512 tagCheck(TagOffset[payload, index, 8])
513 loadp PayloadOffset[payload, index, 8], payload
517 # Index and payload must be different registers. Index is not mutated. Use
518 # this if you know what the tag of the variable should be. Doing the tag
519 # test as part of loading the variable reduces register use, but may not
520 # be faster than doing loadConstantOrVariable followed by a branch on the
# Checked variant: branches to `slow` when the tag != expectedTag.
522 macro loadConstantOrVariablePayload(index, expectedTag, payload, slow)
523 loadConstantOrVariablePayloadTagCustom(
525 macro (actualTag) bineq actualTag, expectedTag, slow end,
# Unchecked variant: same load, empty tag check.
529 macro loadConstantOrVariablePayloadUnchecked(index, payload)
530 loadConstantOrVariablePayloadTagCustom(
532 macro (actualTag) end,
# storeStructureWithTypeInfo: store the structure ID into the cell, then copy
# the structure's indexing-type/type-info word into the cell header.
536 macro storeStructureWithTypeInfo(cell, structure, scratch)
537 storep structure, JSCell::m_structureID[cell]
539 loadi Structure::m_blob + StructureIDBlob::u.words.word2[structure], scratch
540 storei scratch, JSCell::m_indexingType[cell]
# writeBarrierOnOperand: GC write barrier for the cell named by instruction
# operand `cellOperand`. Skips to .writeBarrierDone when the operand is not a
# cell or its mark byte says no barrier is needed; otherwise calls the slow
# path. Several lines (checkMarkByte continuation, stack adjustment) are
# elided in this excerpt.
543 macro writeBarrierOnOperand(cellOperand)
545 loadisFromInstruction(cellOperand, t1)
546 loadConstantOrVariablePayload(t1, CellTag, t2, .writeBarrierDone)
547 checkMarkByte(t2, t1, t3,
549 btbnz gcData, .writeBarrierDone
551 # We make two extra slots because cCall2 will poke.
553 cCall2Void(_llint_write_barrier_slow, cfr, t2)
# writeBarrierOnOperands: barrier on the cell operand only when the value
# operand actually holds a cell.
562 macro writeBarrierOnOperands(cellOperand, valueOperand)
564 loadisFromInstruction(valueOperand, t1)
565 loadConstantOrVariableTag(t1, t0)
566 bineq t0, CellTag, .writeBarrierDone
568 writeBarrierOnOperand(cellOperand)
# writeBarrierOnGlobalObject: same idea, but the barriered cell is the
# CodeBlock's global object.
573 macro writeBarrierOnGlobalObject(valueOperand)
575 loadisFromInstruction(valueOperand, t1)
576 loadConstantOrVariableTag(t1, t0)
577 bineq t0, CellTag, .writeBarrierDone
579 loadp CodeBlock[cfr], t3
580 loadp CodeBlock::m_globalObject[t3], t3
581 checkMarkByte(t3, t1, t2,
583 btbnz gcData, .writeBarrierDone
585 # We make two extra slots because cCall2 will poke.
587 cCall2Void(_llint_write_barrier_slow, cfr, t3)
# valueProfile: record tag/payload into the ValueProfile bucket pointed to by
# the instruction word at offset `operand`.
596 macro valueProfile(tag, payload, operand, scratch)
597 loadp operand[PC], scratch
598 storei tag, ValueProfile::m_buckets + TagOffset[scratch]
599 storei payload, ValueProfile::m_buckets + PayloadOffset[scratch]
603 # Entrypoints into the interpreter
605 # Expects that CodeBlock is in t1, which is what prologue() leaves behind.
# functionArityCheck: if the caller passed >= m_numParameters arguments, jump
# straight to doneLabel; otherwise run the arity slow path, which either
# throws (t1 = caller frame on error) or returns ArityCheckData in t1. The
# fixup then either tail-calls a thunk or moves the frame up by the padded
# slot count inline. Loop heads/labels between the visible lines are elided.
606 macro functionArityCheck(doneLabel, slowPath)
607 loadi PayloadOffset + ArgumentCount[cfr], t0
608 biaeq t0, CodeBlock::m_numParameters[t1], doneLabel
609 cCall2(slowPath, cfr, PC) # This slowPath has a simple protocol: t0 = 0 => no error, t0 != 0 => error
611 move t1, cfr # t1 contains caller frame
612 jmp _llint_throw_from_slow_path_trampoline
615 # t1 points to ArityCheckData.
616 loadp CommonSlowPaths::ArityCheckData::thunkToCall[t1], t2
617 btpz t2, .proceedInline
619 loadp CommonSlowPaths::ArityCheckData::returnPC[t1], t5
620 loadp CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t0
623 loadp ReturnPC[cfr], t0
629 loadi CommonSlowPaths::ArityCheckData::paddedStackSpace[t1], t1
632 // Move frame up "t1 * 2" slots
# Copy loop: t2 counts ArgumentCount + header slots; t3 walks the old frame
# (its initialization is elided), copying each JSValue up by t1 slots.
636 loadi PayloadOffset + ArgumentCount[cfr], t2
637 addi CallFrameHeaderSlots, t2
639 loadi PayloadOffset[t3], t0
640 storei t0, PayloadOffset[t3, t1, 8]
641 loadi TagOffset[t3], t0
642 storei t0, TagOffset[t3, t1, 8]
644 bsubinz 1, t2, .copyLoop
646 // Fill new slots with JSUndefined
650 storei t0, PayloadOffset[t3, t1, 8]
651 move UndefinedTag, t0
652 storei t0, TagOffset[t3, t1, 8]
654 baddinz 1, t2, .fillLoop
660 # Reload CodeBlock and PC, since the slow_path clobbered it.
661 loadp CodeBlock[cfr], t1
662 loadp CodeBlock::m_instructions[t1], PC
# branchIfException: recover the VM from the callee cell and branch to
# `label` unless VM::m_exception is empty. (The branch-to-label line after
# .noException handling is elided here.)
666 macro branchIfException(label)
667 loadp ScopeChain[cfr], t3
668 andp MarkedBlockMask, t3
669 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
670 bieq VM::m_exception + TagOffset[t3], EmptyValueTag, .noException
676 # Instruction implementations
# op_enter: zero-initialize (tag Undefined) the frame's local variables.
# The loop counter setup between line 684 and 688 is elided.
680 checkStackPointerAlignment(t2, 0xdead00e1)
681 loadp CodeBlock[cfr], t2 // t2<CodeBlock> = cfr.CodeBlock
682 loadi CodeBlock::m_numVars[t2], t2 // t2<size_t> = t2<CodeBlock>.m_numVars
683 btiz t2, .opEnterDone
684 move UndefinedTag, t0
688 storei t0, TagOffset[cfr, t2, 8]
689 storei t1, PayloadOffset[cfr, t2, 8]
691 btinz t2, .opEnterLoop
693 callSlowPath(_slow_path_enter)
# op_create_activation: lazily create the activation only if the slot is
# still empty (EmptyValueTag).
697 _llint_op_create_activation:
700 bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateActivationDone
701 callSlowPath(_llint_slow_path_create_activation)
702 .opCreateActivationDone:
# op_init_lazy_reg: store the empty JSValue into the target register.
706 _llint_op_init_lazy_reg:
709 storei EmptyValueTag, TagOffset[cfr, t0, 8]
710 storei 0, PayloadOffset[cfr, t0, 8]
# op_create_arguments: lazily create the arguments object if still empty.
714 _llint_op_create_arguments:
717 bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateArgumentsDone
718 callSlowPath(_slow_path_create_arguments)
719 .opCreateArgumentsDone:
# op_create_this: fast-path object allocation using the callee function's
# ObjectAllocationProfile; falls back to the slow path when the profile has
# no allocator or allocation fails.
723 _llint_op_create_this:
726 loadp PayloadOffset[cfr, t0, 8], t0
727 loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_allocator[t0], t1
728 loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_structure[t0], t2
729 btpz t1, .opCreateThisSlow
730 allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow)
732 storei CellTag, TagOffset[cfr, t1, 8]
733 storei t0, PayloadOffset[cfr, t1, 8]
737 callSlowPath(_slow_path_create_this)
# op_get_callee: load the callee cell and compare against the cached value
# in the instruction stream (operand 2); mismatch goes to the slow path.
741 _llint_op_get_callee:
744 loadp PayloadOffset + Callee[cfr], t1
745 loadpFromInstruction(2, t2)
746 bpneq t1, t2, .opGetCalleeSlow
747 storei CellTag, TagOffset[cfr, t0, 8]
748 storei t1, PayloadOffset[cfr, t0, 8]
752 callSlowPath(_slow_path_get_callee)
# op_to_this fast path: `this` must be a final object whose structure matches
# the cached structure in operand 2.
758 bineq TagOffset[cfr, t0, 8], CellTag, .opToThisSlow
759 loadi PayloadOffset[cfr, t0, 8], t0
760 bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
761 loadpFromInstruction(2, t2)
762 bpneq JSCell::m_structureID[t0], t2, .opToThisSlow
766 callSlowPath(_slow_path_to_this)
# op_new_object: allocate via the per-site ObjectAllocationProfile.
770 _llint_op_new_object:
772 loadpFromInstruction(3, t0)
773 loadp ObjectAllocationProfile::m_allocator[t0], t1
774 loadp ObjectAllocationProfile::m_structure[t0], t2
775 allocateJSObject(t1, t2, t0, t3, .opNewObjectSlow)
777 storei CellTag, TagOffset[cfr, t1, 8]
778 storei t0, PayloadOffset[cfr, t1, 8]
782 callSlowPath(_llint_slow_path_new_object)
# op_mov fragment: copy a constant-or-variable into the destination register.
790 loadConstantOrVariable(t1, t2, t3)
791 storei t2, TagOffset[cfr, t0, 8]
792 storei t3, PayloadOffset[cfr, t0, 8]
# notifyWrite: VariableWatchpointSet bookkeeping for a write. If the set is
# already invalidated there is nothing to do; if the written value differs
# from the inferred value, take the slow path (which fires the watchpoint).
796 macro notifyWrite(set, valueTag, valuePayload, scratch, slow)
797 loadb VariableWatchpointSet::m_state[set], scratch
798 bieq scratch, IsInvalidated, .done
799 bineq valuePayload, VariableWatchpointSet::m_inferredValue + PayloadOffset[set], slow
800 bineq valueTag, VariableWatchpointSet::m_inferredValue + TagOffset[set], slow
# op_captured_mov: like op_mov, but notifies the watchpoint set in operand 3
# (if non-null) before storing.
804 _llint_op_captured_mov:
807 loadConstantOrVariable(t1, t2, t3)
808 loadpFromInstruction(3, t0)
809 btpz t0, .opCapturedMovReady
810 notifyWrite(t0, t2, t3, t1, .opCapturedMovSlow)
813 storei t2, TagOffset[cfr, t0, 8]
814 storei t3, PayloadOffset[cfr, t0, 8]
818 callSlowPath(_slow_path_captured_mov)
# op_not fast path: only boolean inputs are handled inline (the payload XOR
# with 1 is elided between lines 827 and 829).
826 loadConstantOrVariable(t0, t2, t3)
827 bineq t2, BooleanTag, .opNotSlow
829 storei t2, TagOffset[cfr, t1, 8]
830 storei t3, PayloadOffset[cfr, t1, 8]
834 callSlowPath(_slow_path_not)
# op_eq fast path: tags must match, must not be cells, and must not be
# doubles (tag below LowestTag); otherwise slow path.
842 loadConstantOrVariable(t2, t3, t1)
843 loadConstantOrVariable2Reg(t0, t2, t0)
844 bineq t2, t3, .opEqSlow
845 bieq t2, CellTag, .opEqSlow
846 bib t2, LowestTag, .opEqSlow
849 storei BooleanTag, TagOffset[cfr, t2, 8]
850 storei t0, PayloadOffset[cfr, t2, 8]
854 callSlowPath(_slow_path_eq)
# op_eq_null: compare a value against null/undefined, honoring
# MasqueradesAsUndefined cells (which compare via the structure's global
# object matching the current CodeBlock's global object).
862 assertNotConstant(t0)
863 loadi TagOffset[cfr, t0, 8], t1
864 loadi PayloadOffset[cfr, t0, 8], t0
865 bineq t1, CellTag, .opEqNullImmediate
866 btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .opEqNullMasqueradesAsUndefined
868 jmp .opEqNullNotImmediate
869 .opEqNullMasqueradesAsUndefined:
870 loadp JSCell::m_structureID[t0], t1
871 loadp CodeBlock[cfr], t0
872 loadp CodeBlock::m_globalObject[t0], t0
873 cpeq Structure::m_globalObject[t1], t0, t1
874 jmp .opEqNullNotImmediate
# Immediate case: result = (tag == NullTag) | (tag == UndefinedTag); the
# NullTag compare/or lines are partially elided here.
877 cieq t1, UndefinedTag, t1
879 .opEqNullNotImmediate:
880 storei BooleanTag, TagOffset[cfr, t3, 8]
881 storei t1, PayloadOffset[cfr, t3, 8]
# op_neq fast path: mirror of op_eq above.
889 loadConstantOrVariable(t2, t3, t1)
890 loadConstantOrVariable2Reg(t0, t2, t0)
891 bineq t2, t3, .opNeqSlow
892 bieq t2, CellTag, .opNeqSlow
893 bib t2, LowestTag, .opNeqSlow
896 storei BooleanTag, TagOffset[cfr, t2, 8]
897 storei t0, PayloadOffset[cfr, t2, 8]
901 callSlowPath(_slow_path_neq)
# op_neq_null: negated mirror of op_eq_null.
909 assertNotConstant(t0)
910 loadi TagOffset[cfr, t0, 8], t1
911 loadi PayloadOffset[cfr, t0, 8], t0
912 bineq t1, CellTag, .opNeqNullImmediate
913 btbnz JSCell::m_flags[t0], MasqueradesAsUndefined, .opNeqNullMasqueradesAsUndefined
915 jmp .opNeqNullNotImmediate
916 .opNeqNullMasqueradesAsUndefined:
917 loadp JSCell::m_structureID[t0], t1
918 loadp CodeBlock[cfr], t0
919 loadp CodeBlock::m_globalObject[t0], t0
920 cpneq Structure::m_globalObject[t1], t0, t1
921 jmp .opNeqNullNotImmediate
923 cineq t1, NullTag, t2
924 cineq t1, UndefinedTag, t1
926 .opNeqNullNotImmediate:
927 storei BooleanTag, TagOffset[cfr, t3, 8]
928 storei t1, PayloadOffset[cfr, t3, 8]
# strictEq(equalityOperation, slowPath): shared body for stricteq/nstricteq.
# Doubles (tag < LowestTag) and string-vs-string comparisons go to the slow
# path; everything else compares payloads with the supplied operation.
932 macro strictEq(equalityOperation, slowPath)
935 loadConstantOrVariable(t2, t3, t1)
936 loadConstantOrVariable2Reg(t0, t2, t0)
938 bib t2, LowestTag, .slow
939 bineq t2, CellTag, .notString
940 bbneq JSCell::m_type[t0], StringType, .notString
941 bbeq JSCell::m_type[t1], StringType, .slow
944 equalityOperation(t0, t1, t0)
945 storei BooleanTag, TagOffset[cfr, t2, 8]
946 storei t0, PayloadOffset[cfr, t2, 8]
950 callSlowPath(slowPath)
# op_stricteq / op_nstricteq instantiate strictEq with cieq / cineq.
956 strictEq(macro (left, right, result) cieq left, right, result end, _slow_path_stricteq)
961 strictEq(macro (left, right, result) cineq left, right, result end, _slow_path_nstricteq)
# op_inc fast path: int32 only; baddio branches to slow on overflow.
967 bineq TagOffset[cfr, t0, 8], Int32Tag, .opIncSlow
968 loadi PayloadOffset[cfr, t0, 8], t1
969 baddio 1, t1, .opIncSlow
970 storei t1, PayloadOffset[cfr, t0, 8]
974 callSlowPath(_slow_path_inc)
# op_dec fast path: int32 only; bsubio branches to slow on overflow.
981 bineq TagOffset[cfr, t0, 8], Int32Tag, .opDecSlow
982 loadi PayloadOffset[cfr, t0, 8], t1
983 bsubio 1, t1, .opDecSlow
984 storei t1, PayloadOffset[cfr, t0, 8]
988 callSlowPath(_slow_path_dec)
# op_to_number: ints and doubles pass through unchanged; other tags
# (tag >= LowestTag, i.e. non-numeric) go to the slow path.
996 loadConstantOrVariable(t0, t2, t3)
997 bieq t2, Int32Tag, .opToNumberIsInt
998 biaeq t2, LowestTag, .opToNumberSlow
1000 storei t2, TagOffset[cfr, t1, 8]
1001 storei t3, PayloadOffset[cfr, t1, 8]
1005 callSlowPath(_slow_path_to_number)
# op_negate: int path bails on 0x7fffffff-masked values (0 and INT_MIN,
# whose negations are not representable as the same int32); double path
# flips the sign bit (the xor line is elided between 1021 and 1023).
1013 loadConstantOrVariable(t0, t1, t2)
1014 bineq t1, Int32Tag, .opNegateSrcNotInt
1015 btiz t2, 0x7fffffff, .opNegateSlow
1017 storei Int32Tag, TagOffset[cfr, t3, 8]
1018 storei t2, PayloadOffset[cfr, t3, 8]
1021 bia t1, LowestTag, .opNegateSlow
1023 storei t1, TagOffset[cfr, t3, 8]
1024 storei t2, PayloadOffset[cfr, t3, 8]
1028 callSlowPath(_slow_path_negate)
# binaryOpCustomStore: shared skeleton for arithmetic ops. Fast path when
# both operands are int32; otherwise converts to doubles (conversion lines
# elided in this excerpt) and applies doubleOperation, storing the full
# 64-bit double into the frame slot; anything non-numeric goes to slowPath.
1032 macro binaryOpCustomStore(integerOperationAndStore, doubleOperation, slowPath)
1035 loadConstantOrVariable(t2, t3, t1)
1036 loadConstantOrVariable2Reg(t0, t2, t0)
1037 bineq t2, Int32Tag, .op1NotInt
1038 bineq t3, Int32Tag, .op2NotInt
1040 integerOperationAndStore(t3, t1, t0, .slow, t2)
1044 # First operand is definitely not an int, the second operand could be anything.
1045 bia t2, LowestTag, .slow
1046 bib t3, LowestTag, .op1NotIntOp2Double
1047 bineq t3, Int32Tag, .slow
1050 .op1NotIntOp2Double:
1055 doubleOperation(ft1, ft0)
1056 stored ft0, [cfr, t1, 8]
1060 # First operand is definitely an int, the second operand is definitely not.
1062 bia t3, LowestTag, .slow
1065 doubleOperation(ft1, ft0)
1066 stored ft0, [cfr, t2, 8]
1070 callSlowPath(slowPath)
# binaryOp: adapter that stores Int32Tag + result for the integer case.
1074 macro binaryOp(integerOperation, doubleOperation, slowPath)
1075 binaryOpCustomStore(
1076 macro (int32Tag, left, right, slow, index)
1077 integerOperation(left, right, slow)
1078 storei int32Tag, TagOffset[cfr, index, 8]
1079 storei right, PayloadOffset[cfr, index, 8]
1081 doubleOperation, slowPath)
# op_add: baddio overflows to slow; doubles use addd.
1087 macro (left, right, slow) baddio left, right, slow end,
1088 macro (left, right) addd left, right end,
# op_mul: custom store because a zero result must be re-checked (to preserve
# -0 semantics via the slow path); btinz .done skips that re-check for
# non-zero products (the zero-product handling lines are elided).
1094 binaryOpCustomStore(
1095 macro (int32Tag, left, right, slow, index)
1096 const scratch = int32Tag # We know that we can reuse the int32Tag register since it has a constant.
1098 bmulio left, scratch, slow
1099 btinz scratch, .done
1103 storei Int32Tag, TagOffset[cfr, index, 8]
1104 storei scratch, PayloadOffset[cfr, index, 8]
1106 macro (left, right) muld left, right end,
# op_sub: bsubio overflows to slow; doubles use subd.
1113 macro (left, right, slow) bsubio left, right, slow end,
1114 macro (left, right) subd left, right end,
# op_div: always divides as doubles, then bcd2i converts back to int32 when
# the quotient is exactly representable; otherwise stores the double.
1120 binaryOpCustomStore(
1121 macro (int32Tag, left, right, slow, index)
1125 bcd2i ft1, right, .notInt
1126 storei int32Tag, TagOffset[cfr, index, 8]
1127 storei right, PayloadOffset[cfr, index, 8]
1130 stored ft1, [cfr, index, 8]
1133 macro (left, right) divd left, right end,
# bitOp: shared skeleton for bitwise/shift ops — int32 operands only.
1137 macro bitOp(operation, slowPath, advance)
1140 loadConstantOrVariable(t2, t3, t1)
1141 loadConstantOrVariable2Reg(t0, t2, t0)
1142 bineq t3, Int32Tag, .slow
1143 bineq t2, Int32Tag, .slow
1146 storei t3, TagOffset[cfr, t2, 8]
1147 storei t0, PayloadOffset[cfr, t2, 8]
1151 callSlowPath(slowPath)
# op_lshift / op_rshift / op_urshift instantiations.
1158 macro (left, right) lshifti left, right end,
1166 macro (left, right) rshifti left, right end,
1174 macro (left, right) urshifti left, right end,
# op_unsigned: an urshift result that would be negative as int32 (i.e. needs
# a uint32 box) goes to the slow path.
1183 loadConstantOrVariablePayload(t1, Int32Tag, t2, .opUnsignedSlow)
1184 bilt t2, 0, .opUnsignedSlow
1185 storei t2, PayloadOffset[cfr, t0, 8]
1186 storei Int32Tag, TagOffset[cfr, t0, 8]
1189 callSlowPath(_slow_path_unsigned)
# op_bitand / op_bitxor / op_bitor instantiations.
1196 macro (left, right) andi left, right end,
1204 macro (left, right) xori left, right end,
1212 macro (left, right) ori left, right end,
# op_check_has_instance: fast path requires the base to be a cell with the
# ImplementsDefaultHasInstance flag.
1217 _llint_op_check_has_instance:
1220 loadConstantOrVariablePayload(t1, CellTag, t0, .opCheckHasInstanceSlow)
1221 btbz JSCell::m_flags[t0], ImplementsDefaultHasInstance, .opCheckHasInstanceSlow
1224 .opCheckHasInstanceSlow:
1225 callSlowPath(_llint_slow_path_check_has_instance)
# op_instanceof: walk the value's prototype chain looking for the prototype
# operand. t1 = prototype (must be an object), t2 = value being tested.
1229 _llint_op_instanceof:
1231 # Actually do the work.
1234 loadConstantOrVariablePayload(t0, CellTag, t1, .opInstanceofSlow)
1235 bbb JSCell::m_type[t1], ObjectType, .opInstanceofSlow
1237 loadConstantOrVariablePayload(t0, CellTag, t2, .opInstanceofSlow)
1239 # Register state: t1 = prototype, t2 = value
# Loop: t2 = t2->structure->prototype payload; match => done (true), non-null
# => keep walking. The loop label and result setup lines are elided.
1242 loadp JSCell::m_structureID[t2], t2
1243 loadi Structure::m_prototype + PayloadOffset[t2], t2
1244 bpeq t2, t1, .opInstanceofDone
1245 btinz t2, .opInstanceofLoop
1249 storei BooleanTag, TagOffset[cfr, t3, 8]
1250 storei t0, PayloadOffset[cfr, t3, 8]
1254 callSlowPath(_llint_slow_path_instanceof)
# op_is_undefined: immediates compare tag against UndefinedTag; cells are
# only "undefined" when they masquerade as undefined for the current global.
1258 _llint_op_is_undefined:
1262 loadConstantOrVariable(t1, t2, t3)
1263 storei BooleanTag, TagOffset[cfr, t0, 8]
1264 bieq t2, CellTag, .opIsUndefinedCell
1265 cieq t2, UndefinedTag, t3
1266 storei t3, PayloadOffset[cfr, t0, 8]
1269 btbnz JSCell::m_flags[t3], MasqueradesAsUndefined, .opIsUndefinedMasqueradesAsUndefined
1271 storei t1, PayloadOffset[cfr, t0, 8]
1273 .opIsUndefinedMasqueradesAsUndefined:
1274 loadp JSCell::m_structureID[t3], t1
1275 loadp CodeBlock[cfr], t3
1276 loadp CodeBlock::m_globalObject[t3], t3
1277 cpeq Structure::m_globalObject[t1], t3, t1
1278 storei t1, PayloadOffset[cfr, t0, 8]
# op_is_boolean: result = (tag == BooleanTag).
1282 _llint_op_is_boolean:
1286 loadConstantOrVariableTag(t1, t0)
1287 cieq t0, BooleanTag, t0
1288 storei BooleanTag, TagOffset[cfr, t2, 8]
1289 storei t0, PayloadOffset[cfr, t2, 8]
# op_is_number: numbers are tags < LowestTag + 1 (doubles and Int32Tag).
1293 _llint_op_is_number:
1297 loadConstantOrVariableTag(t1, t0)
1298 storei BooleanTag, TagOffset[cfr, t2, 8]
1300 cib t0, LowestTag + 1, t1
1301 storei t1, PayloadOffset[cfr, t2, 8]
# op_is_string: cells with StringType; non-cells are false.
1305 _llint_op_is_string:
1309 loadConstantOrVariable(t1, t0, t3)
1310 storei BooleanTag, TagOffset[cfr, t2, 8]
1311 bineq t0, CellTag, .opIsStringNotCell
1312 cbeq JSCell::m_type[t3], StringType, t1
1313 storei t1, PayloadOffset[cfr, t2, 8]
1316 storep 0, PayloadOffset[cfr, t2, 8]
# loadPropertyAtVariableOffsetKnownNotInline: read an out-of-line property
# through the butterfly. (The offset negation line appears to be elided
# between the assert and the butterfly load — confirm in full source.)
1320 macro loadPropertyAtVariableOffsetKnownNotInline(propertyOffset, objectAndStorage, tag, payload)
1321 assert(macro (ok) bigteq propertyOffset, firstOutOfLineOffset, ok end)
1323 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1324 loadi TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], tag
1325 loadi PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], payload
# loadPropertyAtVariableOffset: inline properties live directly after the
# JSObject header; out-of-line ones go through the butterfly.
1328 macro loadPropertyAtVariableOffset(propertyOffset, objectAndStorage, tag, payload)
1329 bilt propertyOffset, firstOutOfLineOffset, .isInline
1330 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1334 addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1336 loadi TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], tag
1337 loadi PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffset, 8], payload
# storePropertyAtVariableOffset: store counterpart of the above; note the
# explicit negi on the out-of-line path (butterfly indexes negatively).
1340 macro storePropertyAtVariableOffset(propertyOffsetAsInt, objectAndStorage, tag, payload)
1341 bilt propertyOffsetAsInt, firstOutOfLineOffset, .isInline
1342 loadp JSObject::m_butterfly[objectAndStorage], objectAndStorage
1343 negi propertyOffsetAsInt
1346 addp sizeof JSObject - (firstOutOfLineOffset - 2) * 8, objectAndStorage
1348 storei tag, TagOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
1349 storei payload, PayloadOffset + (firstOutOfLineOffset - 2) * 8[objectAndStorage, propertyOffsetAsInt, 8]
# op_init_global_const: write-barriered store of a constant-or-variable into
# a global variable slot (slot pointer load into t0 is elided).
1353 _llint_op_init_global_const:
1355 writeBarrierOnGlobalObject(2)
1358 loadConstantOrVariable(t1, t2, t3)
1359 storei t2, TagOffset[t0]
1360 storei t3, PayloadOffset[t0]
1364 # We only do monomorphic get_by_id caching for now, and we do not modify the
1365 # opcode. We do, however, allow for the cache to change anytime if fails, since
1366 # ping-ponging is free. At best we get lucky and the get_by_id will continue
1367 # to take fast path on the new cache. At worst we take slow path, which is what
1368 # we would have been doing anyway.
1370 macro getById(getPropertyStorage)
1374 loadConstantOrVariablePayload(t0, CellTag, t3, .opGetByIdSlow)
1379 macro (propertyStorage, scratch)
1380 bpneq JSCell::m_structureID[t3], t1, .opGetByIdSlow
1382 loadi TagOffset[propertyStorage, t2], scratch
1383 loadi PayloadOffset[propertyStorage, t2], t2
1384 storei scratch, TagOffset[cfr, t1, 8]
1385 storei t2, PayloadOffset[cfr, t1, 8]
1386 valueProfile(scratch, t2, 32, t1)
1391 callSlowPath(_llint_slow_path_get_by_id)
1395 _llint_op_get_by_id:
1396 getById(withInlineStorage)
1399 _llint_op_get_by_id_out_of_line:
1400 getById(withOutOfLineStorage)
# Fast path for reading array.length: only taken when the array profile says
# the base is an array with a real indexing shape; otherwise go to the
# generic get_by_id slow path.
1403 _llint_op_get_array_length:
1407 loadConstantOrVariablePayload(t0, CellTag, t3, .opGetArrayLengthSlow)
1409 arrayProfile(t2, t1, t0)
1410 btiz t2, IsArray, .opGetArrayLengthSlow
1411 btiz t2, IndexingShapeMask, .opGetArrayLengthSlow
# publicLength lives in the IndexingHeader just before the butterfly.
1413 loadp JSObject::m_butterfly[t3], t0
1414 loadi -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], t0
# A length that doesn't fit in a non-negative int32 can't be returned as an
# Int32 JSValue; take the slow path.
1415 bilt t0, 0, .opGetArrayLengthSlow
1416 valueProfile(Int32Tag, t0, 32, t2)
1417 storep t0, PayloadOffset[cfr, t1, 8]
1418 storep Int32Tag, TagOffset[cfr, t1, 8]
1421 .opGetArrayLengthSlow:
1422 callSlowPath(_llint_slow_path_get_by_id)
# arguments.length fast path: only valid while the arguments object has not
# been materialized (its register still holds the empty value).
1426 _llint_op_get_arguments_length:
1430 bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentsLengthSlow
# ArgumentCount includes 'this'; the visible code stores it as-is.
# NOTE(review): a decrement may exist on a line elided from this excerpt.
1431 loadi ArgumentCount + PayloadOffset[cfr], t2
1433 storei Int32Tag, TagOffset[cfr, t1, 8]
1434 storei t2, PayloadOffset[cfr, t1, 8]
1437 .opGetArgumentsLengthSlow:
1438 callSlowPath(_llint_slow_path_get_arguments_length)
# Monomorphic put_by_id (non-transition) inline cache: write-barrier the
# base and value operands, check the cached structure, then store the new
# value into the cached property slot. Misses go to the generic slow path.
1442 macro putById(getPropertyStorage)
1444 writeBarrierOnOperands(1, 3)
1447 loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
1452 macro (propertyStorage, scratch)
1453 bpneq JSCell::m_structureID[t0], t1, .opPutByIdSlow
1455 loadConstantOrVariable2Reg(t2, scratch, t2)
1456 storei scratch, TagOffset[propertyStorage, t1]
1457 storei t2, PayloadOffset[propertyStorage, t1]
1462 callSlowPath(_llint_slow_path_put_by_id)
# Inline vs. out-of-line storage variants of put_by_id.
1466 _llint_op_put_by_id:
1467 putById(withInlineStorage)
1470 _llint_op_put_by_id_out_of_line:
1471 putById(withOutOfLineStorage)
# put_by_id that transitions the object's structure: verify the old
# structure, run any extra checks (e.g. prototype-chain validation), store
# the new value, then install the new structure. additionalChecks receives
# (oldStructure, scratch, slowPathLabel).
1474 macro putByIdTransition(additionalChecks, getPropertyStorage)
1476 writeBarrierOnOperand(1)
1479 loadConstantOrVariablePayload(t3, CellTag, t0, .opPutByIdSlow)
1481 bpneq JSCell::m_structureID[t0], t1, .opPutByIdSlow
1482 additionalChecks(t1, t3, .opPutByIdSlow)
1487 macro (propertyStorage, scratch)
# t3 = address of the target property slot within the storage.
1488 addp t1, propertyStorage, t3
1489 loadConstantOrVariable2Reg(t2, t1, t2)
1490 storei t1, TagOffset[t3]
1492 storei t2, PayloadOffset[t3]
# Install the new structure last, after the value is in place.
# NOTE(review): t1 is presumably reloaded with the new structure on a line
# elided from this excerpt — confirm against the full source.
1493 storep t1, JSCell::m_structureID[t0]
1498 callSlowPath(_llint_slow_path_put_by_id)
# No-op checks variant for direct transitions (no prototype chain to walk).
1502 macro noAdditionalChecks(oldStructure, scratch, slowPath)
# Walk the cached StructureChain (loaded from the instruction stream at
# offset 28) and verify every prototype on the chain still has the
# structure recorded there; any mismatch bails to slowPath.
1505 macro structureChainChecks(oldStructure, scratch, slowPath)
1506 const protoCell = oldStructure # Reusing the oldStructure register for the proto
1508 loadp 28[PC], scratch
1509 assert(macro (ok) btpnz scratch, ok end)
1510 loadp StructureChain::m_vector[scratch], scratch
1511 assert(macro (ok) btpnz scratch, ok end)
# A null prototype terminates the chain successfully.
1512 bieq Structure::m_prototype + TagOffset[oldStructure], NullTag, .done
1514 loadi Structure::m_prototype + PayloadOffset[oldStructure], protoCell
1515 loadp JSCell::m_structureID[protoCell], oldStructure
1516 bpneq oldStructure, [scratch], slowPath
1518 bineq Structure::m_prototype + TagOffset[oldStructure], NullTag, .loop
# Four put_by_id transition variants: direct transitions skip the
# prototype-chain checks; "normal" ones validate the cached structure chain.
# Each also picks inline vs. out-of-line property storage.
1522 _llint_op_put_by_id_transition_direct:
1523 putByIdTransition(noAdditionalChecks, withInlineStorage)
1526 _llint_op_put_by_id_transition_direct_out_of_line:
1527 putByIdTransition(noAdditionalChecks, withOutOfLineStorage)
1530 _llint_op_put_by_id_transition_normal:
1531 putByIdTransition(structureChainChecks, withInlineStorage)
1534 _llint_op_put_by_id_transition_normal_out_of_line:
1535 putByIdTransition(structureChainChecks, withOutOfLineStorage)
# get_by_val fast paths, dispatched on the butterfly's indexing shape:
# Int32/Contiguous (tagged values), Double (raw doubles), and
# ArrayStorage..SlowPutArrayStorage (vector with holes). Out-of-bounds and
# holes record profiling info and fall back to the slow path.
1538 _llint_op_get_by_val:
1541 loadConstantOrVariablePayload(t2, CellTag, t0, .opGetByValSlow)
1544 arrayProfile(t2, t3, t1)
1546 loadConstantOrVariablePayload(t3, Int32Tag, t1, .opGetByValSlow)
1547 loadp JSObject::m_butterfly[t0], t3
1548 andi IndexingShapeMask, t2
1549 bieq t2, Int32Shape, .opGetByValIsContiguous
1550 bineq t2, ContiguousShape, .opGetByValNotContiguous
1551 .opGetByValIsContiguous:
# Unsigned compare doubles as a negative-index check.
1553 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1554 loadi TagOffset[t3, t1, 8], t2
1555 loadi PayloadOffset[t3, t1, 8], t1
1558 .opGetByValNotContiguous:
1559 bineq t2, DoubleShape, .opGetByValNotDouble
1560 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t3], .opGetByValOutOfBounds
1561 loadd [t3, t1, 8], ft0
# NaN in a double array marks a hole; treat it as a miss.
1562 bdnequn ft0, ft0, .opGetByValSlow
1563 # FIXME: This could be massively optimized.
1566 jmp .opGetByValNotEmpty
1568 .opGetByValNotDouble:
# Accept ArrayStorageShape..SlowPutArrayStorageShape via one unsigned range
# check after rebasing t2.
1569 subi ArrayStorageShape, t2
1570 bia t2, SlowPutArrayStorageShape - ArrayStorageShape, .opGetByValSlow
1571 biaeq t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t3], .opGetByValOutOfBounds
1572 loadi ArrayStorage::m_vector + TagOffset[t3, t1, 8], t2
1573 loadi ArrayStorage::m_vector + PayloadOffset[t3, t1, 8], t1
# A hole in ArrayStorage is the empty value.
1577 bieq t2, EmptyValueTag, .opGetByValOutOfBounds
1578 .opGetByValNotEmpty:
1579 storei t2, TagOffset[cfr, t0, 8]
1580 storei t1, PayloadOffset[cfr, t0, 8]
1581 valueProfile(t2, t1, 20, t0)
1584 .opGetByValOutOfBounds:
# Remember that this access went out of bounds so the JIT can compile for it.
1585 loadpFromInstruction(4, t0)
1586 storeb 1, ArrayProfile::m_outOfBounds[t0]
1588 callSlowPath(_llint_slow_path_get_by_val)
# arguments[i] fast path: only valid while the arguments object has not been
# materialized and the index is an in-range int32. Arguments live in the
# caller-populated slots just above ThisArgumentOffset.
1592 _llint_op_get_argument_by_val:
1593 # FIXME: At some point we should array profile this. Right now it isn't necessary
1594 # since the DFG will never turn a get_argument_by_val into a GetByVal.
1598 bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentByValSlow
1599 loadConstantOrVariablePayload(t1, Int32Tag, t2, .opGetArgumentByValSlow)
1601 loadi ArgumentCount + PayloadOffset[cfr], t1
# Unsigned compare rejects both too-large and negative indices.
1602 biaeq t2, t1, .opGetArgumentByValSlow
1604 loadi ThisArgumentOffset + TagOffset[cfr, t2, 8], t0
1605 loadi ThisArgumentOffset + PayloadOffset[cfr, t2, 8], t1
1606 storei t0, TagOffset[cfr, t3, 8]
1607 storei t1, PayloadOffset[cfr, t3, 8]
1608 valueProfile(t0, t1, 20, t2)
1611 .opGetArgumentByValSlow:
1612 callSlowPath(_llint_slow_path_get_argument_by_val)
# get_by_pname: property load during for-in enumeration using the cached
# JSPropertyNameIterator. Valid only while the base's structure matches the
# iterator's cached structure and the property index is a cacheable slot.
1616 _llint_op_get_by_pname:
1619 loadConstantOrVariablePayload(t0, CellTag, t1, .opGetByPnameSlow)
1621 bpneq t1, PayloadOffset[cfr, t0, 8], .opGetByPnameSlow
1623 loadConstantOrVariablePayload(t0, CellTag, t2, .opGetByPnameSlow)
1625 loadi PayloadOffset[cfr, t0, 8], t3
1626 loadp JSCell::m_structureID[t2], t0
1627 bpneq t0, JSPropertyNameIterator::m_cachedStructure[t3], .opGetByPnameSlow
1629 loadi [cfr, t0, 8], t0
1631 biaeq t0, JSPropertyNameIterator::m_numCacheableSlots[t3], .opGetByPnameSlow
# Indices past the inline capacity map to out-of-line slots; rebase them to
# the out-of-line offset space before the generic property load.
1632 bilt t0, JSPropertyNameIterator::m_cachedStructureInlineCapacity[t3], .opGetByPnameInlineProperty
1633 addi firstOutOfLineOffset, t0
1634 subi JSPropertyNameIterator::m_cachedStructureInlineCapacity[t3], t0
1635 .opGetByPnameInlineProperty:
1636 loadPropertyAtVariableOffset(t0, t2, t1, t3)
1638 storei t1, TagOffset[cfr, t0, 8]
1639 storei t3, PayloadOffset[cfr, t0, 8]
1643 callSlowPath(_llint_slow_path_get_by_pname)
# Shared put_by_val store for Int32/Double/Contiguous shapes: in-bounds
# stores go straight through storeCallback; out-of-bounds stores within the
# vector length grow publicLength (recording a may-store-to-hole hint),
# anything beyond goes to the slow path.
1647 macro contiguousPutByVal(storeCallback)
1648 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
1651 storeCallback(t2, t1, t0, t3)
1655 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
1657 storeb 1, ArrayProfile::m_mayStoreToHole[t2]
# New publicLength = index + 1 (computed on a line elided from this excerpt).
1659 storei t2, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
# put_by_val fast paths, dispatched on indexing shape. Int32 shape only
# accepts int32 values; Double shape converts int32s and rejects NaN
# (NaN marks holes); Contiguous stores any JSValue; ArrayStorage handles
# holes and in-vector length growth. slowPath distinguishes put_by_val from
# put_by_val_direct.
1663 macro putByVal(slowPath)
1665 writeBarrierOnOperands(1, 3)
1667 loadConstantOrVariablePayload(t0, CellTag, t1, .opPutByValSlow)
1670 arrayProfile(t2, t3, t0)
1672 loadConstantOrVariablePayload(t0, Int32Tag, t3, .opPutByValSlow)
1673 loadp JSObject::m_butterfly[t1], t0
1674 andi IndexingShapeMask, t2
1675 bineq t2, Int32Shape, .opPutByValNotInt32
1677 macro (operand, scratch, base, index)
# Storing a non-int32 into an Int32Shape array must take the slow path so
# the array can transition shape.
1678 loadConstantOrVariablePayload(operand, Int32Tag, scratch, .opPutByValSlow)
1679 storei Int32Tag, TagOffset[base, index, 8]
1680 storei scratch, PayloadOffset[base, index, 8]
1683 .opPutByValNotInt32:
1684 bineq t2, DoubleShape, .opPutByValNotDouble
1686 macro (operand, scratch, base, index)
1688 const payload = operand
1689 loadConstantOrVariable2Reg(operand, tag, payload)
1690 bineq tag, Int32Tag, .notInt
# Build the double from the (tag, payload) pair; NaN cannot be stored since
# it is the hole marker in double arrays.
1694 fii2d payload, tag, ft0
1695 bdnequn ft0, ft0, .opPutByValSlow
1697 stored ft0, [base, index, 8]
1700 .opPutByValNotDouble:
1701 bineq t2, ContiguousShape, .opPutByValNotContiguous
1703 macro (operand, scratch, base, index)
1705 const payload = operand
1706 loadConstantOrVariable2Reg(operand, tag, payload)
1707 storei tag, TagOffset[base, index, 8]
1708 storei payload, PayloadOffset[base, index, 8]
1711 .opPutByValNotContiguous:
1712 bineq t2, ArrayStorageShape, .opPutByValSlow
1713 biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.vectorLength[t0], .opPutByValOutOfBounds
# Storing over a hole needs bookkeeping (numValuesInVector, maybe length).
1714 bieq ArrayStorage::m_vector + TagOffset[t0, t3, 8], EmptyValueTag, .opPutByValArrayStorageEmpty
1715 .opPutByValArrayStorageStoreResult:
1717 loadConstantOrVariable2Reg(t2, t1, t2)
1718 storei t1, ArrayStorage::m_vector + TagOffset[t0, t3, 8]
1719 storei t2, ArrayStorage::m_vector + PayloadOffset[t0, t3, 8]
1722 .opPutByValArrayStorageEmpty:
1724 storeb 1, ArrayProfile::m_mayStoreToHole[t1]
1725 addi 1, ArrayStorage::m_numValuesInVector[t0]
1726 bib t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .opPutByValArrayStorageStoreResult
# Index >= publicLength: bump the length before storing.
1728 storei t1, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0]
1729 jmp .opPutByValArrayStorageStoreResult
1731 .opPutByValOutOfBounds:
1732 loadpFromInstruction(4, t0)
1733 storeb 1, ArrayProfile::m_outOfBounds[t0]
1735 callSlowPath(slowPath)
# put_by_val vs. put_by_val_direct differ only in the slow path taken.
1739 _llint_op_put_by_val:
1740 putByVal(_llint_slow_path_put_by_val)
1742 _llint_op_put_by_val_direct:
1743 putByVal(_llint_slow_path_put_by_val_direct)
# Unconditional jump body (its op label is not visible in this excerpt):
# branch by the offset in the first instruction operand.
1747 dispatchBranch(4[PC])
# Shared body for jtrue/jfalse: take the fast path only when the operand is
# already a boolean; conditionOp decides whether to branch to .target.
1750 macro jumpTrueOrFalse(conditionOp, slow)
1752 loadConstantOrVariablePayload(t1, BooleanTag, t0, .slow)
1753 conditionOp(t0, .target)
1757 dispatchBranch(8[PC])
# Shared body for jeq_null/jneq_null: cells go through cellHandler (which
# must consider MasqueradesAsUndefined), immediates through immediateHandler
# (which compares the tag against NullTag/UndefinedTag).
1765 macro equalNull(cellHandler, immediateHandler)
1767 assertNotConstant(t0)
1768 loadi TagOffset[cfr, t0, 8], t1
1769 loadi PayloadOffset[cfr, t0, 8], t0
1770 bineq t1, CellTag, .immediate
1771 loadp JSCell::m_structureID[t0], t2
1772 cellHandler(t2, JSCell::m_flags[t0], .target)
1776 dispatchBranch(8[PC])
1780 immediateHandler(t1, .target)
# jeq_null cell handler: a cell equals null only if it masquerades as
# undefined within this frame's global object.
1787 macro (structure, value, target)
1788 btbz value, MasqueradesAsUndefined, .opJeqNullNotMasqueradesAsUndefined
1789 loadp CodeBlock[cfr], t0
1790 loadp CodeBlock::m_globalObject[t0], t0
1791 bpeq Structure::m_globalObject[structure], t0, target
1792 .opJeqNullNotMasqueradesAsUndefined:
1794 macro (value, target) bieq value, NullTag, target end)
# jneq_null: same structure with the branch sense inverted.
1797 _llint_op_jneq_null:
1800 macro (structure, value, target)
1801 btbz value, MasqueradesAsUndefined, target
1802 loadp CodeBlock[cfr], t0
1803 loadp CodeBlock::m_globalObject[t0], t0
1804 bpneq Structure::m_globalObject[structure], t0, target
1806 macro (value, target) bineq value, NullTag, target end)
# jneq_ptr body (its op label is not visible in this excerpt): branch unless
# the operand cell is the global object's special pointer selected by t1.
1813 loadp CodeBlock[cfr], t2
1814 loadp CodeBlock::m_globalObject[t2], t2
# Non-cells can never equal the special pointer, so branch immediately.
1815 bineq TagOffset[cfr, t0, 8], CellTag, .opJneqPtrBranch
1816 loadp JSGlobalObject::m_specialPointers[t2, t1, 4], t1
1817 bpeq PayloadOffset[cfr, t0, 8], t1, .opJneqPtrFallThrough
1819 dispatchBranch(12[PC])
1820 .opJneqPtrFallThrough:
# Shared body for the relational compare-and-branch opcodes (jless etc.):
# int32/int32 compares inline; any int32/double mix converts and uses the
# double comparator; anything else (cells, other tags) takes slowPath.
1824 macro compare(integerCompare, doubleCompare, slowPath)
1827 loadConstantOrVariable(t2, t0, t1)
1828 loadConstantOrVariable2Reg(t3, t2, t3)
1829 bineq t0, Int32Tag, .op1NotInt
1830 bineq t2, Int32Tag, .op2NotInt
1831 integerCompare(t1, t3, .jumpTarget)
# Tags above LowestTag are not doubles; bail to the slow path.
1835 bia t0, LowestTag, .slow
1836 bib t2, LowestTag, .op1NotIntOp2Double
1837 bineq t2, Int32Tag, .slow
1840 .op1NotIntOp2Double:
1844 doubleCompare(ft0, ft1, .jumpTarget)
1849 bia t2, LowestTag, .slow
1851 doubleCompare(ft0, ft1, .jumpTarget)
1855 dispatchBranch(12[PC])
1858 callSlowPath(slowPath)
# switch_imm: look the int32 scrutinee up in the SimpleJumpTable stored in
# the CodeBlock's rare data; a zero offset or out-of-range value falls
# through; doubles take the slow path so they can be handled numerically.
1863 _llint_op_switch_imm:
1867 loadConstantOrVariable(t2, t1, t0)
1868 loadp CodeBlock[cfr], t2
1869 loadp CodeBlock::m_rareData[t2], t2
1870 muli sizeof SimpleJumpTable, t3 # FIXME: would be nice to peephole this!
1871 loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1873 bineq t1, Int32Tag, .opSwitchImmNotInt
# Index into branchOffsets relative to the table's minimum value.
1874 subi SimpleJumpTable::min[t2], t0
1875 biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchImmFallThrough
1876 loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t3
1877 loadi [t3, t0, 4], t1
# Zero in the table means "no case for this value".
1878 btiz t1, .opSwitchImmFallThrough
1879 dispatchBranchWithOffset(t1)
1882 bib t1, LowestTag, .opSwitchImmSlow # Go to slow path if it's a double.
1883 .opSwitchImmFallThrough:
1884 dispatchBranch(8[PC])
1887 callSlowPath(_llint_slow_path_switch_imm)
# switch_char: fast path only for resolved (non-rope) single-character
# strings; reads the character from the 8-bit or 16-bit buffer and indexes
# the SimpleJumpTable like switch_imm. Everything else falls through or goes
# to the slow path (ropes).
1891 _llint_op_switch_char:
1895 loadConstantOrVariable(t2, t1, t0)
1896 loadp CodeBlock[cfr], t2
1897 loadp CodeBlock::m_rareData[t2], t2
1898 muli sizeof SimpleJumpTable, t3
1899 loadp CodeBlock::RareData::m_switchJumpTables + VectorBufferOffset[t2], t2
1901 bineq t1, CellTag, .opSwitchCharFallThrough
1902 bbneq JSCell::m_type[t0], StringType, .opSwitchCharFallThrough
1903 bineq JSString::m_length[t0], 1, .opSwitchCharFallThrough
1904 loadp JSString::m_value[t0], t0
# A null StringImpl means the string is still a rope and must be resolved.
1905 btpz t0, .opSwitchOnRope
1906 loadp StringImpl::m_data8[t0], t1
1907 btinz StringImpl::m_hashAndFlags[t0], HashFlags8BitBuffer, .opSwitchChar8Bit
1909 jmp .opSwitchCharReady
1913 subi SimpleJumpTable::min[t2], t0
1914 biaeq t0, SimpleJumpTable::branchOffsets + VectorSizeOffset[t2], .opSwitchCharFallThrough
1915 loadp SimpleJumpTable::branchOffsets + VectorBufferOffset[t2], t2
1916 loadi [t2, t0, 4], t1
1917 btiz t1, .opSwitchCharFallThrough
1918 dispatchBranchWithOffset(t1)
1920 .opSwitchCharFallThrough:
1921 dispatchBranch(8[PC])
1924 callSlowPath(_llint_slow_path_switch_char)
# new_func body (op label not visible in this excerpt): when the check
# operand is set, skip creation if the destination already holds a value.
1930 btiz 12[PC], .opNewFuncUnchecked
1932 bineq TagOffset[cfr, t1, 8], EmptyValueTag, .opNewFuncDone
1933 .opNewFuncUnchecked:
1934 callSlowPath(_llint_slow_path_new_func)
# Captured functions are always created via the slow path.
1939 _llint_op_new_captured_func:
1941 callSlowPath(_slow_path_new_captured_func)
# Record the 'this' argument's structure in the call's ArrayProfile when
# 'this' is a cell; used by calls that feed array intrinsics.
1945 macro arrayProfileForCall()
1948 bineq ThisArgumentOffset + TagOffset[cfr, t3, 8], CellTag, .done
1949 loadi ThisArgumentOffset + PayloadOffset[cfr, t3, 8], t0
1950 loadp JSCell::m_structureID[t0], t0
# The profile pointer lives near the end of the call instruction.
1951 loadpFromInstruction(CallOpCodeSize - 2, t1)
1952 storep t0, ArrayProfile::m_lastSeenStructureID[t1]
# Shared body for call/construct: fast path requires the callee to match the
# LLIntCallLinkInfo's cached callee. Sets up the new frame's Callee,
# ScopeChain, and ArgumentCount slots, saves PC, and calls the linked
# machine-code target; misses link/dispatch via slowPath.
1956 macro doCall(slowPath)
1959 loadp LLIntCallLinkInfo::callee[t1], t2
1960 loadConstantOrVariablePayload(t0, CellTag, t3, .opCallSlow)
1961 bineq t3, t2, .opCallSlow
1965 addp cfr, t3 # t3 contains the new value of cfr
1966 loadp JSFunction::m_scope[t2], t0
1967 storei t2, Callee + PayloadOffset[t3]
1968 storei t0, ScopeChain + PayloadOffset[t3]
# Save the interpreter PC in the caller frame so the return can resume.
1970 storei PC, ArgumentCount + TagOffset[cfr]
# NOTE(review): an argument-count load into t2 presumably occurs on a line
# elided from this excerpt — confirm against the full source.
1971 storei t2, ArgumentCount + PayloadOffset[t3]
1972 storei CellTag, Callee + TagOffset[t3]
1973 storei CellTag, ScopeChain + TagOffset[t3]
1974 addp CallerFrameAndPCSize, t3
1975 callTargetFunction(t1, t3)
1978 slowPathForCall(slowPath)
# tear_off_activation: only needs slow-path work if the activation was
# actually created (register is non-empty).
1982 _llint_op_tear_off_activation:
1985 bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffActivationNotCreated
1986 callSlowPath(_llint_slow_path_tear_off_activation)
1987 .opTearOffActivationNotCreated:
# tear_off_arguments: the unmodified arguments register is adjacent to the
# arguments register; same created-check as above.
1991 _llint_op_tear_off_arguments:
1994 addi 1, t0 # Get the unmodifiedArgumentsRegister
1995 bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffArgumentsNotCreated
1996 callSlowPath(_llint_slow_path_tear_off_arguments)
1997 .opTearOffArgumentsNotCreated:
# ret body (op label not visible in this excerpt): load the return value and
# fall into the epilogue after an OSR check.
2003 checkSwitchToJITForEpilogue()
2005 loadConstantOrVariable(t2, t1, t0)
# ret_object_or_this: return the first operand if it is an object;
# otherwise return the second ('this') operand.
2009 _llint_op_ret_object_or_this:
2011 checkSwitchToJITForEpilogue()
2013 loadConstantOrVariable(t2, t1, t0)
2014 bineq t1, CellTag, .opRetObjectOrThisNotObject
2015 bbb JSCell::m_type[t0], ObjectType, .opRetObjectOrThisNotObject
2018 .opRetObjectOrThisNotObject:
2020 loadConstantOrVariable(t2, t1, t0)
# to_primitive: immediates and strings are already primitive and pass
# through unchanged; other cells need the slow path (valueOf/toString).
2024 _llint_op_to_primitive:
2028 loadConstantOrVariable(t2, t1, t0)
2029 bineq t1, CellTag, .opToPrimitiveIsImm
2030 bbneq JSCell::m_type[t0], StringType, .opToPrimitiveSlowCase
2031 .opToPrimitiveIsImm:
2032 storei t1, TagOffset[cfr, t3, 8]
2033 storei t0, PayloadOffset[cfr, t3, 8]
2036 .opToPrimitiveSlowCase:
2037 callSlowPath(_slow_path_to_primitive)
# next_pname: advance the for-in enumeration. Fetch the next cached name
# string from the iterator, then verify the base object's structure (and its
# whole cached prototype chain) still matches the iterator's snapshot; any
# mismatch re-validates via the slow path.
2041 _llint_op_next_pname:
2045 loadi PayloadOffset[cfr, t1, 8], t0
# Enumeration ends when the index reaches the cached size.
2046 bieq t0, PayloadOffset[cfr, t2, 8], .opNextPnameEnd
2048 loadi PayloadOffset[cfr, t2, 8], t2
2049 loadp JSPropertyNameIterator::m_jsStrings[t2], t3
2050 loadi [t3, t0, 8], t3
2052 storei t0, PayloadOffset[cfr, t1, 8]
2054 storei CellTag, TagOffset[cfr, t1, 8]
2055 storei t3, PayloadOffset[cfr, t1, 8]
2057 loadi PayloadOffset[cfr, t3, 8], t3
2058 loadp JSCell::m_structureID[t3], t1
2059 bpneq t1, JSPropertyNameIterator::m_cachedStructure[t2], .opNextPnameSlow
2060 loadp JSPropertyNameIterator::m_cachedPrototypeChain[t2], t0
2061 loadp StructureChain::m_vector[t0], t0
2062 btpz [t0], .opNextPnameTarget
# Walk the cached prototype chain; every prototype must still be non-null
# and have the structure recorded in the chain.
2063 .opNextPnameCheckPrototypeLoop:
2064 bieq Structure::m_prototype + TagOffset[t1], NullTag, .opNextPnameSlow
2065 loadp Structure::m_prototype + PayloadOffset[t1], t2
2066 loadp JSCell::m_structureID[t2], t1
2067 bpneq t1, [t0], .opNextPnameSlow
2069 btpnz [t0], .opNextPnameCheckPrototypeLoop
2071 dispatchBranch(24[PC])
2077 callSlowPath(_llint_slow_path_next_pname) # This either keeps the PC where it was (causing us to loop) or sets it to target.
2082 # This is where we end up from the JIT's throw trampoline (because the
2083 # machine code return address will be set to _llint_op_catch), and from
2084 # the interpreter's throw trampoline (see _llint_throw_trampoline).
2085 # The throwing code must have known that we were throwing to the interpreter,
2086 # and have set VM::targetInterpreterPCForThrow.
# Recover the VM from the frame's scope (cells live in MarkedBlocks, whose
# header reaches the VM via the WeakSet), restore cfr/PC for the handler,
# move the pending exception into the catch register, and clear it.
2087 loadp ScopeChain + PayloadOffset[cfr], t3
2088 andp MarkedBlockMask, t3
2089 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
2090 loadp VM::callFrameForThrow[t3], cfr
2091 restoreStackPointerAfterCall()
2093 loadi VM::targetInterpreterPCForThrow[t3], PC
2094 loadi VM::m_exception + PayloadOffset[t3], t0
2095 loadi VM::m_exception + TagOffset[t3], t1
# Clear VM::m_exception so the handler starts with no pending exception.
2096 storei 0, VM::m_exception + PayloadOffset[t3]
2097 storei EmptyValueTag, VM::m_exception + TagOffset[t3]
2099 storei t0, PayloadOffset[cfr, t2, 8]
2100 storei t1, TagOffset[cfr, t2, 8]
2101 traceExecution() # This needs to be here because we don't want to clobber t0, t1, t2, t3 above.
2105 # Gives you the scope in t0, while allowing you to optionally perform additional checks on the
2106 # scopes as they are traversed. scopeCheck() is called with two arguments: the register
2107 # holding the scope, and a register that can be used for scratch. Note that this does not
2108 # use t3, so you can hold stuff in t3 if need be.
2109 macro getDeBruijnScope(deBruijinIndexOperand, scopeCheck)
2110 loadp ScopeChain + PayloadOffset[cfr], t0
2111 loadi deBruijinIndexOperand, t2
# Function code with an activation that was never created must skip one
# scope level, since the scope chain was built expecting the activation.
2115 loadp CodeBlock[cfr], t1
2116 bineq CodeBlock::m_codeType[t1], FunctionCode, .loop
2117 btbz CodeBlock::m_needsActivation[t1], .loop
2119 loadi CodeBlock::m_activationRegister[t1], t1
2121 # Need to conditionally skip over one scope.
2122 bieq TagOffset[cfr, t1, 8], EmptyValueTag, .noActivation
2124 loadp JSScope::m_next[t0], t0
# Main walk: follow m_next once per de Bruijn level (loop control lines are
# not visible in this excerpt).
2131 loadp JSScope::m_next[t0], t0
# end body (op label not visible in this excerpt): load the return value
# before the epilogue.
2141 checkSwitchToJITForEpilogue()
2143 assertNotConstant(t0)
2144 loadi TagOffset[cfr, t0, 8], t1
2145 loadi PayloadOffset[cfr, t0, 8], t0
# Entry used after a slow path has raised an exception: let the C++ handler
# compute the target, then jump to the machine-code throw target stored in
# the VM.
2149 _llint_throw_from_slow_path_trampoline:
2150 callSlowPath(_llint_slow_path_handle_exception)
2152 # When throwing from the interpreter (i.e. throwing from LLIntSlowPaths), so
2153 # the throw target is not necessarily interpreted code, we come to here.
2154 # This essentially emulates the JIT's throwing protocol.
2155 loadp CodeBlock[cfr], t1
2156 loadp CodeBlock::m_vm[t1], t1
2157 jmp VM::targetMachinePCForThrow[t1]
# Variant used when the throw happens during a call, where the return
# address must first be moved back into place.
2160 _llint_throw_during_call_trampoline:
2161 preserveReturnAddressAfterCall(t2)
2162 jmp _llint_throw_from_slow_path_trampoline
# Trampoline for calling a host (native) function: set up a frame with no
# CodeBlock, publish cfr as the VM's topCallFrame, call the C function
# stored in the executable at executableOffsetToFunction, then check for a
# pending exception. The body is split by target architecture; the opening
# arch condition is not visible in this excerpt (the elsif at 2188 implies
# the first branch is another arch, presumably X86).
2165 macro nativeCallTrampoline(executableOffsetToFunction)
2168 storep 0, CodeBlock[cfr]
2169 loadp CallerFrame[cfr], t0
2170 loadi ScopeChain + PayloadOffset[t0], t1
2171 storei CellTag, ScopeChain + TagOffset[cfr]
2172 storei t1, ScopeChain + PayloadOffset[cfr]
2174 subp 8, sp # align stack pointer
# Recover the VM from the scope cell's MarkedBlock header.
2175 andp MarkedBlockMask, t1
2176 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t3
2177 storep cfr, VM::topCallFrame[t3]
2178 move cfr, t2 # t2 = ecx
2180 loadi Callee + PayloadOffset[cfr], t1
2181 loadp JSFunction::m_executable[t1], t1
2182 checkStackPointerAlignment(t3, 0xdead0001)
2183 call executableOffsetToFunction[t1]
2184 loadp ScopeChain[cfr], t3
2185 andp MarkedBlockMask, t3
2186 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
# Same sequence for the remaining architectures; C_LOOP uses the
# interpreter's cloopCallNative instead of a machine call.
2188 elsif ARM or ARMv7 or ARMv7_TRADITIONAL or C_LOOP or MIPS or SH4
2189 subp 8, sp # align stack pointer
2190 # t1 already contains the ScopeChain.
2191 andp MarkedBlockMask, t1
2192 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
2193 storep cfr, VM::topCallFrame[t1]
2199 loadi Callee + PayloadOffset[cfr], t1
2200 loadp JSFunction::m_executable[t1], t1
2201 checkStackPointerAlignment(t3, 0xdead0001)
2203 cloopCallNative executableOffsetToFunction[t1]
2205 call executableOffsetToFunction[t1]
2207 loadp ScopeChain[cfr], t3
2208 andp MarkedBlockMask, t3
2209 loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
# A non-empty VM::m_exception tag means the native call threw.
2216 bineq VM::m_exception + TagOffset[t3], EmptyValueTag, .handleException
2220 storep cfr, VM::topCallFrame[t3]
2221 restoreStackPointerAfterCall()
2222 jmp _llint_throw_from_slow_path_trampoline
# Store this frame's global object (as a cell) into the virtual register
# named by instruction operand 'dst'.
2226 macro getGlobalObject(dst)
2227 loadp CodeBlock[cfr], t0
2228 loadp CodeBlock::m_globalObject[t0], t0
2229 loadisFromInstruction(dst, t1)
2230 storei CellTag, TagOffset[cfr, t1, 8]
2231 storei t0, PayloadOffset[cfr, t1, 8]
# Bail to slowPath if the global object's var-injection watchpoint has
# fired (i.e. eval/with may have injected variables).
2234 macro varInjectionCheck(slowPath)
2235 loadp CodeBlock[cfr], t0
2236 loadp CodeBlock::m_globalObject[t0], t0
2237 loadp JSGlobalObject::m_varInjectionWatchpoint[t0], t0
2238 bbeq WatchpointSet::m_state[t0], IsInvalidated, slowPath
# Walk the scope chain by the depth in instruction operand 4 and store the
# resulting scope cell into the destination register (operand 1). Skips the
# activation level when the code block needs one but it was never created.
2241 macro resolveScope()
2242 loadp CodeBlock[cfr], t0
2243 loadisFromInstruction(4, t2)
2244 btbz CodeBlock::m_needsActivation[t0], .resolveScopeAfterActivationCheck
2245 loadis CodeBlock::m_activationRegister[t0], t1
2246 btpz PayloadOffset[cfr, t1, 8], .resolveScopeAfterActivationCheck
2249 .resolveScopeAfterActivationCheck:
2250 loadp ScopeChain[cfr], t0
2251 btiz t2, .resolveScopeLoopEnd
# One m_next hop per remaining depth level (decrement line not visible in
# this excerpt).
2254 loadp JSScope::m_next[t0], t0
2256 btinz t2, .resolveScopeLoop
2258 .resolveScopeLoopEnd:
2259 loadisFromInstruction(1, t1)
2260 storei CellTag, TagOffset[cfr, t1, 8]
2261 storei t0, PayloadOffset[cfr, t1, 8]
# resolve_scope: dispatch on the resolve type recorded in operand 3.
# Global* types resolve to the global object; ClosureVar walks the scope
# chain; *WithVarInjectionChecks variants first verify the var-injection
# watchpoint; anything else (Dynamic) goes to the slow path.
2265 _llint_op_resolve_scope:
2267 loadisFromInstruction(3, t0)
2270 bineq t0, GlobalProperty, .rGlobalVar
2275 bineq t0, GlobalVar, .rClosureVar
2280 bineq t0, ClosureVar, .rGlobalPropertyWithVarInjectionChecks
2284 .rGlobalPropertyWithVarInjectionChecks:
2285 bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
2286 varInjectionCheck(.rDynamic)
2290 .rGlobalVarWithVarInjectionChecks:
2291 bineq t0, GlobalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
2292 varInjectionCheck(.rDynamic)
2296 .rClosureVarWithVarInjectionChecks:
2297 bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
2298 varInjectionCheck(.rDynamic)
2303 callSlowPath(_llint_slow_path_resolve_scope)
# Load the object from the virtual register named by 'operand' into t0 and
# bail to slowPath unless its structure matches the one cached in
# instruction operand 5.
2307 macro loadWithStructureCheck(operand, slowPath)
2308 loadisFromInstruction(operand, t0)
2309 loadp [cfr, t0, 8], t0
2310 loadpFromInstruction(5, t1)
2311 bpneq JSCell::m_structureID[t0], t1, slowPath
# getProperty body (macro header not visible in this excerpt): load the
# property at the cached offset (operand 6), profile it, and store it to
# the destination register (operand 1).
2315 loadisFromInstruction(6, t3)
2316 loadPropertyAtVariableOffset(t3, t0, t1, t2)
2317 valueProfile(t1, t2, 28, t0)
2318 loadisFromInstruction(1, t0)
2319 storei t1, TagOffset[cfr, t0, 8]
2320 storei t2, PayloadOffset[cfr, t0, 8]
# Read a global variable through the slot pointer cached in instruction
# operand 6, profile it, and store it to the destination register.
2323 macro getGlobalVar()
2324 loadpFromInstruction(6, t0)
2325 loadp TagOffset[t0], t1
2326 loadp PayloadOffset[t0], t2
2327 valueProfile(t1, t2, 28, t0)
2328 loadisFromInstruction(1, t0)
2329 storei t1, TagOffset[cfr, t0, 8]
2330 storei t2, PayloadOffset[cfr, t0, 8]
# Read a closure variable from the scope's register file (t0 = scope on
# entry) at the index cached in operand 6.
2333 macro getClosureVar()
2334 loadp JSVariableObject::m_registers[t0], t0
2335 loadisFromInstruction(6, t3)
2336 loadp TagOffset[t0, t3, 8], t1
2337 loadp PayloadOffset[t0, t3, 8], t2
2338 valueProfile(t1, t2, 28, t0)
2339 loadisFromInstruction(1, t0)
2340 storei t1, TagOffset[cfr, t0, 8]
2341 storei t2, PayloadOffset[cfr, t0, 8]
# get_from_scope: dispatch on the resolve type (low bits of operand 4).
# GlobalProperty uses a structure-checked cached-offset load; GlobalVar
# reads the cached slot pointer; ClosureVar reads from the scope's
# registers; the *WithVarInjectionChecks variants verify the watchpoint
# first; Dynamic falls through to the slow path.
2344 _llint_op_get_from_scope:
2346 loadisFromInstruction(4, t0)
2347 andi ResolveModeMask, t0
2350 bineq t0, GlobalProperty, .gGlobalVar
2351 loadWithStructureCheck(2, .gDynamic)
2356 bineq t0, GlobalVar, .gClosureVar
2361 bineq t0, ClosureVar, .gGlobalPropertyWithVarInjectionChecks
2362 loadVariable(2, t2, t1, t0)
2366 .gGlobalPropertyWithVarInjectionChecks:
2367 bineq t0, GlobalPropertyWithVarInjectionChecks, .gGlobalVarWithVarInjectionChecks
2368 loadWithStructureCheck(2, .gDynamic)
2372 .gGlobalVarWithVarInjectionChecks:
2373 bineq t0, GlobalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
2374 varInjectionCheck(.gDynamic)
2375 loadVariable(2, t2, t1, t0)
2379 .gClosureVarWithVarInjectionChecks:
2380 bineq t0, ClosureVarWithVarInjectionChecks, .gDynamic
2381 varInjectionCheck(.gDynamic)
2382 loadVariable(2, t2, t1, t0)
2387 callSlowPath(_llint_slow_path_get_from_scope)
# putProperty body (macro header not visible in this excerpt): store the
# value operand into the object's property at the cached offset (operand 6).
2392 loadisFromInstruction(3, t1)
2393 loadConstantOrVariable(t1, t2, t3)
2394 loadisFromInstruction(6, t1)
2395 storePropertyAtVariableOffset(t1, t0, t2, t3)
# Store a value into a global variable slot, notifying the variable's
# watchpoint set first (bails to .pDynamic if that requires slow-path work).
2398 macro putGlobalVar()
2399 loadisFromInstruction(3, t0)
2400 loadConstantOrVariable(t0, t1, t2)
2401 loadpFromInstruction(5, t3)
2402 notifyWrite(t3, t1, t2, t0, .pDynamic)
2403 loadpFromInstruction(6, t0)
2404 storei t1, TagOffset[t0]
2405 storei t2, PayloadOffset[t0]
# Store a value into a closure variable in the scope's register file
# (t0 = scope on entry) at the index cached in operand 6.
2408 macro putClosureVar()
2409 loadisFromInstruction(3, t1)
2410 loadConstantOrVariable(t1, t2, t3)
2411 loadp JSVariableObject::m_registers[t0], t0
2412 loadisFromInstruction(6, t1)
2413 storei t2, TagOffset[t0, t1, 8]
2414 storei t3, PayloadOffset[t0, t1, 8]
# put_to_scope: mirror of get_from_scope for stores. Each resolve type
# performs the appropriate write barrier, then stores via the cached
# structure/offset (GlobalProperty), the cached slot with watchpoint
# notification (GlobalVar), or the scope's registers (ClosureVar); the
# *WithVarInjectionChecks variants verify the watchpoint first. Dynamic
# falls through to the slow path. (Final dispatch is past this excerpt.)
2418 _llint_op_put_to_scope:
2420 loadisFromInstruction(4, t0)
2421 andi ResolveModeMask, t0
2424 bineq t0, GlobalProperty, .pGlobalVar
2425 writeBarrierOnOperands(1, 3)
2426 loadWithStructureCheck(1, .pDynamic)
2431 bineq t0, GlobalVar, .pClosureVar
2432 writeBarrierOnGlobalObject(3)
2437 bineq t0, ClosureVar, .pGlobalPropertyWithVarInjectionChecks
2438 writeBarrierOnOperands(1, 3)
2439 loadVariable(1, t2, t1, t0)
2443 .pGlobalPropertyWithVarInjectionChecks:
2444 bineq t0, GlobalPropertyWithVarInjectionChecks, .pGlobalVarWithVarInjectionChecks
2445 writeBarrierOnOperands(1, 3)
2446 loadWithStructureCheck(1, .pDynamic)
2450 .pGlobalVarWithVarInjectionChecks:
2451 bineq t0, GlobalVarWithVarInjectionChecks, .pClosureVarWithVarInjectionChecks
2452 writeBarrierOnGlobalObject(3)
2453 varInjectionCheck(.pDynamic)
2457 .pClosureVarWithVarInjectionChecks:
2458 bineq t0, ClosureVarWithVarInjectionChecks, .pDynamic
2459 writeBarrierOnOperands(1, 3)
2460 varInjectionCheck(.pDynamic)
2461 loadVariable(1, t2, t1, t0)
2466 callSlowPath(_llint_slow_path_put_to_scope)