-# Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
+# Copyright (C) 2011-2015 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
move t0, PC
end
-macro doCallToJavaScript(makeCall)
+macro doVMEntry(makeCall)
if X86 or X86_WIN
const entry = t4
const vm = t3
const protoCallFrame = t5
- const previousCFR = t0
- const previousPC = t1
- const temp1 = t0 # Same as previousCFR
- const temp2 = t1 # Same as previousPC
+ const temp1 = t0
+ const temp2 = t1
const temp3 = t2
const temp4 = t3 # same as vm
elsif ARM or ARMv7 or ARMv7_TRADITIONAL or C_LOOP
const vm = a1
const protoCallFrame = a2
- const previousCFR = t3
- const previousPC = lr
- const temp1 = t3 # Same as previousCFR
+ const temp1 = t3
const temp2 = t4
const temp3 = t5
const temp4 = t4 # Same as temp2
elsif MIPS
const entry = a0
- const vmTopCallFrame = a1
+ const vm = a1
const protoCallFrame = a2
- const topOfStack = a3
- const previousCFR = t2
- const previousPC = lr
const temp1 = t3
const temp2 = t5
const temp3 = t4
const vm = a1
const protoCallFrame = a2
- const previousCFR = t3
- const previousPC = lr
- const temp1 = t3 # Same as previousCFR
+ const temp1 = t3
const temp2 = a3
const temp3 = t8
const temp4 = t9
end
- callToJavaScriptPrologue()
+ functionPrologue()
+ pushCalleeSaves()
- if X86
- loadp 36[sp], vm
- loadp 32[sp], entry
- elsif X86_WIN
- loadp 40[sp, temp3], vm
- loadp 36[sp, temp3], entry
- else
- move cfr, previousCFR
+ if X86 or X86_WIN
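+ # On x86, entry, vm, and protoCallFrame are passed on the stack; load entry and vm relative to the new frame pointer.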
+ loadp 12[cfr], vm
+ loadp 8[cfr], entry
end
- checkStackPointerAlignment(temp2, 0xbad0dc01)
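+ # Point sp at this frame's VMEntryRecord so its fields can be filled in below.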
+ if ARMv7
+ vmEntryRecord(cfr, temp1)
+ move temp1, sp
+ else
+ vmEntryRecord(cfr, sp)
+ end
- # The stack reserved zone ensures that we have adequate space for the
- # VMEntrySentinelFrame. Proceed with allocating and initializing the
- # sentinel frame.
- move sp, cfr
- subp CallFrameHeaderSlots * 8, cfr
- storep 0, ArgumentCount[cfr]
- storep vm, Callee[cfr]
+ storep vm, VMEntryRecord::m_vm[sp]
loadp VM::topCallFrame[vm], temp2
- storep temp2, ScopeChain[cfr]
- storep 1, CodeBlock[cfr]
- if X86
- loadp 28[sp], previousPC
- loadp 24[sp], previousCFR
- elsif X86_WIN
- loadp 32[sp, temp3], previousPC
- loadp 28[sp, temp3], previousCFR
+ storep temp2, VMEntryRecord::m_prevTopCallFrame[sp]
+ loadp VM::topVMEntryFrame[vm], temp2
+ storep temp2, VMEntryRecord::m_prevTopVMEntryFrame[sp]
+
+ # Align stack pointer
+ if X86_WIN
+ addp CallFrameAlignSlots * SlotSize, sp, temp1
+ andp ~StackAlignmentMask, temp1
+ subp temp1, CallFrameAlignSlots * SlotSize, sp
+ elsif ARM or ARMv7 or ARMv7_TRADITIONAL
+ addp CallFrameAlignSlots * SlotSize, sp, temp1
+ clrbp temp1, StackAlignmentMask, temp1
+ if ARMv7
+ subp temp1, CallFrameAlignSlots * SlotSize, temp1
+ move temp1, sp
+ else
+ subp temp1, CallFrameAlignSlots * SlotSize, sp
+ end
end
- storep previousPC, ReturnPC[cfr]
- storep previousCFR, CallerFrame[cfr]
- if X86
- loadp 40[sp], protoCallFrame
- elsif X86_WIN
- loadp 44[sp, temp3], protoCallFrame
+ if X86 or X86_WIN
+ loadp 16[cfr], protoCallFrame
end
loadi ProtoCallFrame::paddedArgCount[protoCallFrame], temp2
addp CallFrameHeaderSlots, temp2, temp2
lshiftp 3, temp2
- subp cfr, temp2, temp1
+ subp sp, temp2, temp1
# Ensure that we have enough additional stack capacity for the incoming args,
# and the frame for the JS code we're executing. We need to do this check
# before we start copying the args from the protoCallFrame below.
bpaeq temp1, VM::m_jsStackLimit[vm], .stackHeightOK
- if ARMv7
- subp cfr, 8, temp2
- move temp2, sp
- else
- subp cfr, 8, sp
- end
-
if C_LOOP
move entry, temp2
move vm, temp3
move temp3, vm
end
+ subp 8, sp # Align stack for cCall2() to make a call.
cCall2(_llint_throw_stack_overflow_error, vm, protoCallFrame)
- callToJavaScriptEpilogue()
+
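+ # Unwind the entry frame after throwing: restore the previous topCallFrame and topVMEntryFrame, then pop the callee saves and return.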
+ if ARMv7
+ vmEntryRecord(cfr, temp1)
+ move temp1, sp
+ else
+ vmEntryRecord(cfr, sp)
+ end
+
+ loadp VMEntryRecord::m_vm[sp], temp3
+ loadp VMEntryRecord::m_prevTopCallFrame[sp], temp4
+ storep temp4, VM::topCallFrame[temp3]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], temp4
+ storep temp4, VM::topVMEntryFrame[temp3]
+
+ if ARMv7
+ subp cfr, CalleeRegisterSaveSize, temp3
+ move temp3, sp
+ else
+ subp cfr, CalleeRegisterSaveSize, sp
+ end
+
+ popCalleeSaves()
+ functionEpilogue()
ret
.stackHeightOK:
move temp1, sp
- move 5, temp1
+ move 4, temp1
.copyHeaderLoop:
subi 1, temp1
.copyArgsDone:
storep sp, VM::topCallFrame[vm]
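+ # Record this frame as the current topVMEntryFrame so the VM can unwind back to it.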
+ storep cfr, VM::topVMEntryFrame[vm]
makeCall(entry, temp1, temp2)
- bpeq CodeBlock[cfr], 1, .calleeFramePopped
- loadp CallerFrame[cfr], cfr
+ if ARMv7
+ vmEntryRecord(cfr, temp1)
+ move temp1, sp
+ else
+ vmEntryRecord(cfr, sp)
+ end
-.calleeFramePopped:
- loadp Callee[cfr], temp3 # VM
- loadp ScopeChain[cfr], temp4 # previous topCallFrame
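+ # Restore the previous topCallFrame and topVMEntryFrame recorded at entry.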
+ loadp VMEntryRecord::m_vm[sp], temp3
+ loadp VMEntryRecord::m_prevTopCallFrame[sp], temp4
storep temp4, VM::topCallFrame[temp3]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], temp4
+ storep temp4, VM::topVMEntryFrame[temp3]
+
+ if ARMv7
+ subp cfr, CalleeRegisterSaveSize, temp3
+ move temp3, sp
+ else
+ subp cfr, CalleeRegisterSaveSize, sp
+ end
- callToJavaScriptEpilogue()
+ popCalleeSaves()
+ functionEpilogue()
ret
end
macro makeHostFunctionCall(entry, temp1, temp2)
move entry, temp1
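+ # Store the caller's frame pointer into the callee frame's CallerFrame slot, since a host callee has no prologue to do it.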
+ storep cfr, [sp]
if C_LOOP
move sp, a0
- storep cfr, [sp]
storep lr, PtrSize[sp]
cloopCallNative temp1
+ elsif X86 or X86_WIN
+ # Put the callee frame pointer on the stack as arg0; also put it in ecx for "fastcall" targets
+ move 0, temp2
+ move temp2, 4[sp] # put 0 in ReturnPC
+ move sp, t2 # t2 is ecx
+ push temp2 # Push dummy arg1
+ push t2
+ call temp1
+ addp 8, sp
else
- if X86 or X86_WIN
- # Put callee frame pointer on stack as arg0, also put it in ecx for "fastcall" targets
- move 0, temp2
- move temp2, 4[sp] # put 0 in ReturnPC
- move cfr, [sp] # put caller frame pointer into callee frame since callee prologue can't
- move sp, t2 # t2 is ecx
- push temp2 # Push dummy arg1
- push t2
- else
- move sp, a0
- addp CallerFrameAndPCSize, sp
- end
+ move sp, a0
call temp1
- if X86 or X86_WIN
- addp 8, sp
- else
- subp CallerFrameAndPCSize, sp
- end
end
end
_handleUncaughtException:
- loadp ScopeChain + PayloadOffset[cfr], t3
+ loadp Callee + PayloadOffset[cfr], t3
andp MarkedBlockMask, t3
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
loadp VM::callFrameForThrow[t3], cfr
- # So far, we've unwound the stack to the frame just below the sentinel frame, except
- # in the case of stack overflow in the first function called from callToJavaScript.
- # Check if we need to pop to the sentinel frame and do the necessary clean up for
- # returning to the caller C frame.
- bpeq CodeBlock[cfr], 1, .handleUncaughtExceptionAlreadyIsSentinel
- loadp CallerFrame + PayloadOffset[cfr], cfr
-.handleUncaughtExceptionAlreadyIsSentinel:
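+ # The caller of callFrameForThrow is the VM entry frame; unwind its VMEntryRecord and return to the caller.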
+ loadp CallerFrame[cfr], cfr
+
+ if ARMv7
+ vmEntryRecord(cfr, t3)
+ move t3, sp
+ else
+ vmEntryRecord(cfr, sp)
+ end
- loadp Callee + PayloadOffset[cfr], t3 # VM
- loadp ScopeChain + PayloadOffset[cfr], t5 # previous topCallFrame
+ loadp VMEntryRecord::m_vm[sp], t3
+ loadp VMEntryRecord::m_prevTopCallFrame[sp], t5
storep t5, VM::topCallFrame[t3]
+ loadp VMEntryRecord::m_prevTopVMEntryFrame[sp], t5
+ storep t5, VM::topVMEntryFrame[t3]
+
+ if ARMv7
+ subp cfr, CalleeRegisterSaveSize, t3
+ move t3, sp
+ else
+ subp cfr, CalleeRegisterSaveSize, sp
+ end
- callToJavaScriptEpilogue()
+ popCalleeSaves()
+ functionEpilogue()
ret
macro doReturnFromHostFunction(extraStackSpace)
if GGC
loadisFromInstruction(cellOperand, t1)
loadConstantOrVariablePayload(t1, CellTag, t2, .writeBarrierDone)
- checkMarkByte(t2, t1, t3,
+ skipIfIsRememberedOrInEden(t2, t1, t3,
macro(gcData)
btbnz gcData, .writeBarrierDone
push cfr, PC
loadp CodeBlock[cfr], t3
loadp CodeBlock::m_globalObject[t3], t3
- checkMarkByte(t3, t1, t2,
+ skipIfIsRememberedOrInEden(t3, t1, t2,
macro(gcData)
btbnz gcData, .writeBarrierDone
push cfr, PC
end
macro branchIfException(label)
- loadp ScopeChain[cfr], t3
+ loadp Callee + PayloadOffset[cfr], t3
andp MarkedBlockMask, t3
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
- bieq VM::m_exception + TagOffset[t3], EmptyValueTag, .noException
+ btiz VM::m_exception[t3], .noException
jmp label
.noException:
end
dispatch(1)
-_llint_op_create_activation:
+_llint_op_create_lexical_environment:
traceExecution()
- loadi 4[PC], t0
- bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateActivationDone
- callSlowPath(_llint_slow_path_create_activation)
-.opCreateActivationDone:
- dispatch(2)
-
-
-_llint_op_init_lazy_reg:
- traceExecution()
- loadi 4[PC], t0
- storei EmptyValueTag, TagOffset[cfr, t0, 8]
- storei 0, PayloadOffset[cfr, t0, 8]
- dispatch(2)
+ callSlowPath(_llint_slow_path_create_lexical_environment)
+ dispatch(3)
-_llint_op_create_arguments:
+_llint_op_get_scope:
traceExecution()
- loadi 4[PC], t0
- bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opCreateArgumentsDone
- callSlowPath(_slow_path_create_arguments)
-.opCreateArgumentsDone:
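+ # The scope is now read from the callee (JSCallee::m_scope) rather than from a frame slot.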
+ loadi Callee + PayloadOffset[cfr], t0
+ loadi JSCallee::m_scope[t0], t0
+ loadisFromInstruction(1, t1)
+ storei CellTag, TagOffset[cfr, t1, 8]
+ storei t0, PayloadOffset[cfr, t1, 8]
dispatch(2)
traceExecution()
loadi 8[PC], t0
loadp PayloadOffset[cfr, t0, 8], t0
- loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_allocator[t0], t1
- loadp JSFunction::m_allocationProfile + ObjectAllocationProfile::m_structure[t0], t2
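+ # The allocation profile has moved from JSFunction to its FunctionRareData, which may not exist yet.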
+ loadp JSFunction::m_rareData[t0], t4
+ btpz t4, .opCreateThisSlow
+ loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_allocator[t4], t1
+ loadp FunctionRareData::m_allocationProfile + ObjectAllocationProfile::m_structure[t4], t2
btpz t1, .opCreateThisSlow
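+ # Operand 4 caches the callee last seen at this site; the sentinel value 1 means multiple callees have been seen.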
+ loadpFromInstruction(4, t4)
+ bpeq t4, 1, .hasSeenMultipleCallee
+ bpneq t4, t0, .opCreateThisSlow
+.hasSeenMultipleCallee:
allocateJSObject(t1, t2, t0, t3, .opCreateThisSlow)
loadi 4[PC], t1
storei CellTag, TagOffset[cfr, t1, 8]
storei t0, PayloadOffset[cfr, t1, 8]
- dispatch(4)
+ dispatch(5)
.opCreateThisSlow:
callSlowPath(_slow_path_create_this)
- dispatch(4)
-
-
-_llint_op_get_callee:
- traceExecution()
- loadi 4[PC], t0
- loadp PayloadOffset + Callee[cfr], t1
- loadpFromInstruction(2, t2)
- bpneq t1, t2, .opGetCalleeSlow
- storei CellTag, TagOffset[cfr, t0, 8]
- storei t1, PayloadOffset[cfr, t0, 8]
- dispatch(3)
+ dispatch(5)
-.opGetCalleeSlow:
- callSlowPath(_slow_path_get_callee)
- dispatch(3)
_llint_op_to_this:
traceExecution()
bbneq JSCell::m_type[t0], FinalObjectType, .opToThisSlow
loadpFromInstruction(2, t2)
bpneq JSCell::m_structureID[t0], t2, .opToThisSlow
- dispatch(3)
+ dispatch(4)
.opToThisSlow:
callSlowPath(_slow_path_to_this)
- dispatch(3)
+ dispatch(4)
_llint_op_new_object:
dispatch(4)
-_llint_op_mov:
+_llint_op_check_tdz:
traceExecution()
- loadi 8[PC], t1
- loadi 4[PC], t0
- loadConstantOrVariable(t1, t2, t3)
- storei t2, TagOffset[cfr, t0, 8]
- storei t3, PayloadOffset[cfr, t0, 8]
- dispatch(3)
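+ # check_tdz throws a TDZ error if the binding is still empty (uninitialized).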
+ loadpFromInstruction(1, t0)
+ bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opNotTDZ
+ callSlowPath(_slow_path_throw_tdz_error)
+.opNotTDZ:
+ dispatch(2)
-macro notifyWrite(set, valueTag, valuePayload, scratch, slow)
- loadb VariableWatchpointSet::m_state[set], scratch
- bieq scratch, IsInvalidated, .done
- bineq valuePayload, VariableWatchpointSet::m_inferredValue + PayloadOffset[set], slow
- bineq valueTag, VariableWatchpointSet::m_inferredValue + TagOffset[set], slow
-.done:
-end
-_llint_op_captured_mov:
+_llint_op_mov:
traceExecution()
loadi 8[PC], t1
- loadConstantOrVariable(t1, t2, t3)
- loadpFromInstruction(3, t0)
- btpz t0, .opCapturedMovReady
- notifyWrite(t0, t2, t3, t1, .opCapturedMovSlow)
-.opCapturedMovReady:
loadi 4[PC], t0
+ loadConstantOrVariable(t1, t2, t3)
storei t2, TagOffset[cfr, t0, 8]
storei t3, PayloadOffset[cfr, t0, 8]
- dispatch(4)
-
-.opCapturedMovSlow:
- callSlowPath(_slow_path_captured_mov)
- dispatch(4)
+ dispatch(3)
_llint_op_not:
loadConstantOrVariable2Reg(t0, t2, t0)
bineq t2, t3, .slow
bib t2, LowestTag, .slow
- bineq t2, CellTag, .notString
- bbneq JSCell::m_type[t0], StringType, .notString
- bbeq JSCell::m_type[t1], StringType, .slow
-.notString:
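+ # Pointer equality is only valid for object cells; strings and symbols still need the slow path.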
+ bineq t2, CellTag, .notStringOrSymbol
+ bbaeq JSCell::m_type[t0], ObjectType, .notStringOrSymbol
+ bbb JSCell::m_type[t1], ObjectType, .slow
+.notStringOrSymbol:
loadi 4[PC], t2
equalityOperation(t0, t1, t0)
storei BooleanTag, TagOffset[cfr, t2, 8]
dispatch(3)
+_llint_op_to_string:
+ traceExecution()
+ loadi 8[PC], t0
+ loadi 4[PC], t1
+ loadConstantOrVariable(t0, t2, t3)
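+ # Values that are already strings pass through unchanged; everything else takes the slow path.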
+ bineq t2, CellTag, .opToStringSlow
+ bbneq JSCell::m_type[t3], StringType, .opToStringSlow
+.opToStringIsString:
+ storei t2, TagOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t1, 8]
+ dispatch(3)
+
+.opToStringSlow:
+ callSlowPath(_slow_path_to_string)
+ dispatch(3)
+
+
_llint_op_negate:
traceExecution()
loadi 8[PC], t0
dispatch(3)
+_llint_op_is_object:
+ traceExecution()
+ loadi 8[PC], t1
+ loadi 4[PC], t2
+ loadConstantOrVariable(t1, t0, t3)
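+ # A value is an object if it is a cell whose type is ObjectType or above.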
+ storei BooleanTag, TagOffset[cfr, t2, 8]
+ bineq t0, CellTag, .opIsObjectNotCell
+ cbaeq JSCell::m_type[t3], ObjectType, t1
+ storei t1, PayloadOffset[cfr, t2, 8]
+ dispatch(3)
+.opIsObjectNotCell:
+ storep 0, PayloadOffset[cfr, t2, 8]
+ dispatch(3)
+
+
macro loadPropertyAtVariableOffsetKnownNotInline(propertyOffset, objectAndStorage, tag, payload)
assert(macro (ok) bigteq propertyOffset, firstOutOfLineOffset, ok end)
negi propertyOffset
dispatch(9)
-_llint_op_get_arguments_length:
- traceExecution()
- loadi 8[PC], t0
- loadi 4[PC], t1
- bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentsLengthSlow
- loadi ArgumentCount + PayloadOffset[cfr], t2
- subi 1, t2
- storei Int32Tag, TagOffset[cfr, t1, 8]
- storei t2, PayloadOffset[cfr, t1, 8]
- dispatch(4)
-
-.opGetArgumentsLengthSlow:
- callSlowPath(_llint_slow_path_get_arguments_length)
- dispatch(4)
-
-
macro putById(getPropertyStorage)
traceExecution()
writeBarrierOnOperands(1, 3)
dispatch(6)
-_llint_op_get_argument_by_val:
- # FIXME: At some point we should array profile this. Right now it isn't necessary
- # since the DFG will never turn a get_argument_by_val into a GetByVal.
- traceExecution()
- loadi 8[PC], t0
- loadi 12[PC], t1
- bineq TagOffset[cfr, t0, 8], EmptyValueTag, .opGetArgumentByValSlow
- loadConstantOrVariablePayload(t1, Int32Tag, t2, .opGetArgumentByValSlow)
- addi 1, t2
- loadi ArgumentCount + PayloadOffset[cfr], t1
- biaeq t2, t1, .opGetArgumentByValSlow
- loadi 4[PC], t3
- loadi ThisArgumentOffset + TagOffset[cfr, t2, 8], t0
- loadi ThisArgumentOffset + PayloadOffset[cfr, t2, 8], t1
- storei t0, TagOffset[cfr, t3, 8]
- storei t1, PayloadOffset[cfr, t3, 8]
- valueProfile(t0, t1, 20, t2)
- dispatch(6)
-
-.opGetArgumentByValSlow:
- callSlowPath(_llint_slow_path_get_argument_by_val)
- dispatch(6)
-
-
-_llint_op_get_by_pname:
- traceExecution()
- loadi 12[PC], t0
- loadConstantOrVariablePayload(t0, CellTag, t1, .opGetByPnameSlow)
- loadi 16[PC], t0
- bpneq t1, PayloadOffset[cfr, t0, 8], .opGetByPnameSlow
- loadi 8[PC], t0
- loadConstantOrVariablePayload(t0, CellTag, t2, .opGetByPnameSlow)
- loadi 20[PC], t0
- loadi PayloadOffset[cfr, t0, 8], t3
- loadp JSCell::m_structureID[t2], t0
- bpneq t0, JSPropertyNameIterator::m_cachedStructure[t3], .opGetByPnameSlow
- loadi 24[PC], t0
- loadi [cfr, t0, 8], t0
- subi 1, t0
- biaeq t0, JSPropertyNameIterator::m_numCacheableSlots[t3], .opGetByPnameSlow
- bilt t0, JSPropertyNameIterator::m_cachedStructureInlineCapacity[t3], .opGetByPnameInlineProperty
- addi firstOutOfLineOffset, t0
- subi JSPropertyNameIterator::m_cachedStructureInlineCapacity[t3], t0
-.opGetByPnameInlineProperty:
- loadPropertyAtVariableOffset(t0, t2, t1, t3)
- loadi 4[PC], t0
- storei t1, TagOffset[cfr, t0, 8]
- storei t3, PayloadOffset[cfr, t0, 8]
- dispatch(7)
-
-.opGetByPnameSlow:
- callSlowPath(_llint_slow_path_get_by_pname)
- dispatch(7)
-
-
macro contiguousPutByVal(storeCallback)
biaeq t3, -sizeof IndexingHeader + IndexingHeader::u.lengths.publicLength[t0], .outOfBounds
.storeResult:
dispatch(0)
-_llint_op_new_func:
- traceExecution()
- btiz 12[PC], .opNewFuncUnchecked
- loadi 4[PC], t1
- bineq TagOffset[cfr, t1, 8], EmptyValueTag, .opNewFuncDone
-.opNewFuncUnchecked:
- callSlowPath(_llint_slow_path_new_func)
-.opNewFuncDone:
- dispatch(4)
-
-
-_llint_op_new_captured_func:
- traceExecution()
- callSlowPath(_slow_path_new_captured_func)
- dispatch(4)
-
-
macro arrayProfileForCall()
loadi 16[PC], t3
negi t3
lshifti 3, t3
negi t3
addp cfr, t3 # t3 contains the new value of cfr
- loadp JSFunction::m_scope[t2], t0
storei t2, Callee + PayloadOffset[t3]
- storei t0, ScopeChain + PayloadOffset[t3]
loadi 12[PC], t2
storei PC, ArgumentCount + TagOffset[cfr]
storei t2, ArgumentCount + PayloadOffset[t3]
storei CellTag, Callee + TagOffset[t3]
- storei CellTag, ScopeChain + TagOffset[t3]
addp CallerFrameAndPCSize, t3
callTargetFunction(t1, t3)
end
-_llint_op_tear_off_activation:
- traceExecution()
- loadi 4[PC], t0
- bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffActivationNotCreated
- callSlowPath(_llint_slow_path_tear_off_activation)
-.opTearOffActivationNotCreated:
- dispatch(2)
-
-
-_llint_op_tear_off_arguments:
- traceExecution()
- loadi 4[PC], t0
- addi 1, t0 # Get the unmodifiedArgumentsRegister
- bieq TagOffset[cfr, t0, 8], EmptyValueTag, .opTearOffArgumentsNotCreated
- callSlowPath(_llint_slow_path_tear_off_arguments)
-.opTearOffArgumentsNotCreated:
- dispatch(3)
-
-
_llint_op_ret:
traceExecution()
checkSwitchToJITForEpilogue()
doReturn()
-_llint_op_ret_object_or_this:
- traceExecution()
- checkSwitchToJITForEpilogue()
- loadi 4[PC], t2
- loadConstantOrVariable(t2, t1, t0)
- bineq t1, CellTag, .opRetObjectOrThisNotObject
- bbb JSCell::m_type[t0], ObjectType, .opRetObjectOrThisNotObject
- doReturn()
-
-.opRetObjectOrThisNotObject:
- loadi 8[PC], t2
- loadConstantOrVariable(t2, t1, t0)
- doReturn()
-
-
_llint_op_to_primitive:
traceExecution()
loadi 8[PC], t2
loadi 4[PC], t3
loadConstantOrVariable(t2, t1, t0)
bineq t1, CellTag, .opToPrimitiveIsImm
- bbneq JSCell::m_type[t0], StringType, .opToPrimitiveSlowCase
+ bbaeq JSCell::m_type[t0], ObjectType, .opToPrimitiveSlowCase
.opToPrimitiveIsImm:
storei t1, TagOffset[cfr, t3, 8]
storei t0, PayloadOffset[cfr, t3, 8]
dispatch(3)
-_llint_op_next_pname:
- traceExecution()
- loadi 12[PC], t1
- loadi 16[PC], t2
- loadi PayloadOffset[cfr, t1, 8], t0
- bieq t0, PayloadOffset[cfr, t2, 8], .opNextPnameEnd
- loadi 20[PC], t2
- loadi PayloadOffset[cfr, t2, 8], t2
- loadp JSPropertyNameIterator::m_jsStrings[t2], t3
- loadi [t3, t0, 8], t3
- addi 1, t0
- storei t0, PayloadOffset[cfr, t1, 8]
- loadi 4[PC], t1
- storei CellTag, TagOffset[cfr, t1, 8]
- storei t3, PayloadOffset[cfr, t1, 8]
- loadi 8[PC], t3
- loadi PayloadOffset[cfr, t3, 8], t3
- loadp JSCell::m_structureID[t3], t1
- bpneq t1, JSPropertyNameIterator::m_cachedStructure[t2], .opNextPnameSlow
- loadp JSPropertyNameIterator::m_cachedPrototypeChain[t2], t0
- loadp StructureChain::m_vector[t0], t0
- btpz [t0], .opNextPnameTarget
-.opNextPnameCheckPrototypeLoop:
- bieq Structure::m_prototype + TagOffset[t1], NullTag, .opNextPnameSlow
- loadp Structure::m_prototype + PayloadOffset[t1], t2
- loadp JSCell::m_structureID[t2], t1
- bpneq t1, [t0], .opNextPnameSlow
- addp 4, t0
- btpnz [t0], .opNextPnameCheckPrototypeLoop
-.opNextPnameTarget:
- dispatchBranch(24[PC])
-
-.opNextPnameEnd:
- dispatch(7)
-
-.opNextPnameSlow:
- callSlowPath(_llint_slow_path_next_pname) # This either keeps the PC where it was (causing us to loop) or sets it to target.
- dispatch(0)
-
-
_llint_op_catch:
# This is where we end up from the JIT's throw trampoline (because the
# machine code return address will be set to _llint_op_catch), and from
# the interpreter's throw trampoline (see _llint_throw_trampoline).
# The throwing code must have known that we were throwing to the interpreter,
# and have set VM::targetInterpreterPCForThrow.
- loadp ScopeChain + PayloadOffset[cfr], t3
+ loadp Callee + PayloadOffset[cfr], t3
andp MarkedBlockMask, t3
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
loadp VM::callFrameForThrow[t3], cfr
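+ # Restore topVMEntryFrame to the entry frame recorded when the exception was thrown.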
+ loadp VM::vmEntryFrameForThrow[t3], t0
+ storep t0, VM::topVMEntryFrame[t3]
restoreStackPointerAfterCall()
loadi VM::targetInterpreterPCForThrow[t3], PC
- loadi VM::m_exception + PayloadOffset[t3], t0
- loadi VM::m_exception + TagOffset[t3], t1
- storei 0, VM::m_exception + PayloadOffset[t3]
- storei EmptyValueTag, VM::m_exception + TagOffset[t3]
+ loadi VM::m_exception[t3], t0
+ storei 0, VM::m_exception[t3]
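+ # Operand 1 receives the Exception cell; operand 2 receives the thrown value.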
loadi 4[PC], t2
storei t0, PayloadOffset[cfr, t2, 8]
- storei t1, TagOffset[cfr, t2, 8]
- traceExecution() # This needs to be here because we don't want to clobber t0, t1, t2, t3 above.
- dispatch(2)
-
-
-# Gives you the scope in t0, while allowing you to optionally perform additional checks on the
-# scopes as they are traversed. scopeCheck() is called with two arguments: the register
-# holding the scope, and a register that can be used for scratch. Note that this does not
-# use t3, so you can hold stuff in t3 if need be.
-macro getDeBruijnScope(deBruijinIndexOperand, scopeCheck)
- loadp ScopeChain + PayloadOffset[cfr], t0
- loadi deBruijinIndexOperand, t2
-
- btiz t2, .done
-
- loadp CodeBlock[cfr], t1
- bineq CodeBlock::m_codeType[t1], FunctionCode, .loop
- btbz CodeBlock::m_needsActivation[t1], .loop
-
- loadi CodeBlock::m_activationRegister[t1], t1
-
- # Need to conditionally skip over one scope.
- bieq TagOffset[cfr, t1, 8], EmptyValueTag, .noActivation
- scopeCheck(t0, t1)
- loadp JSScope::m_next[t0], t0
-.noActivation:
- subi 1, t2
+ storei CellTag, TagOffset[cfr, t2, 8]
- btiz t2, .done
-.loop:
- scopeCheck(t0, t1)
- loadp JSScope::m_next[t0], t0
- subi 1, t2
- btinz t2, .loop
-
-.done:
+ loadi Exception::m_value + TagOffset[t0], t1
+ loadi Exception::m_value + PayloadOffset[t0], t0
+ loadi 8[PC], t2
+ storei t0, PayloadOffset[cfr, t2, 8]
+ storei t1, TagOffset[cfr, t2, 8]
-end
+ traceExecution() # This needs to be here because we don't want to clobber t0, t1, t2, t3 above.
+ dispatch(3)
_llint_op_end:
traceExecution()
# When throwing from the interpreter (i.e. throwing from LLIntSlowPaths), the
# throw target is not necessarily interpreted code, so we come here.
# This essentially emulates the JIT's throwing protocol.
- loadp CodeBlock[cfr], t1
- loadp CodeBlock::m_vm[t1], t1
+ loadp Callee[cfr], t1
+ andp MarkedBlockMask, t1
+ loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
jmp VM::targetMachinePCForThrow[t1]
functionPrologue()
storep 0, CodeBlock[cfr]
- loadp CallerFrame[cfr], t0
- loadi ScopeChain + PayloadOffset[t0], t1
- storei CellTag, ScopeChain + TagOffset[cfr]
- storei t1, ScopeChain + PayloadOffset[cfr]
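+ # The VM is now located through the callee cell's MarkedBlock rather than via the removed ScopeChain slot.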
+ loadi Callee + PayloadOffset[cfr], t1
+ # Callee is still in t1 for code below
if X86 or X86_WIN
subp 8, sp # align stack pointer
andp MarkedBlockMask, t1
loadp JSFunction::m_executable[t1], t1
checkStackPointerAlignment(t3, 0xdead0001)
call executableOffsetToFunction[t1]
- loadp ScopeChain[cfr], t3
+ loadp Callee + PayloadOffset[cfr], t3
andp MarkedBlockMask, t3
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
addp 8, sp
elsif ARM or ARMv7 or ARMv7_TRADITIONAL or C_LOOP or MIPS or SH4
subp 8, sp # align stack pointer
- # t1 already contains the ScopeChain.
+ # t1 already contains the Callee.
andp MarkedBlockMask, t1
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t1], t1
storep cfr, VM::topCallFrame[t1]
else
call executableOffsetToFunction[t1]
end
- loadp ScopeChain[cfr], t3
+ loadp Callee + PayloadOffset[cfr], t3
andp MarkedBlockMask, t3
loadp MarkedBlock::m_weakSet + WeakSet::m_vm[t3], t3
addp 8, sp
end
functionEpilogue()
- bineq VM::m_exception + TagOffset[t3], EmptyValueTag, .handleException
+ btinz VM::m_exception[t3], .handleException
ret
.handleException:
macro resolveScope()
loadp CodeBlock[cfr], t0
- loadisFromInstruction(4, t2)
- btbz CodeBlock::m_needsActivation[t0], .resolveScopeAfterActivationCheck
- loadis CodeBlock::m_activationRegister[t0], t1
- btpz PayloadOffset[cfr, t1, 8], .resolveScopeAfterActivationCheck
- addi 1, t2
+ loadisFromInstruction(5, t2)
-.resolveScopeAfterActivationCheck:
- loadp ScopeChain[cfr], t0
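+ # The starting scope is now an explicit register operand (operand 2) instead of the frame's ScopeChain.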
+ loadisFromInstruction(2, t0)
+ loadp PayloadOffset[cfr, t0, 8], t0
btiz t2, .resolveScopeLoopEnd
.resolveScopeLoop:
_llint_op_resolve_scope:
traceExecution()
- loadisFromInstruction(3, t0)
+ loadisFromInstruction(4, t0)
#rGlobalProperty:
bineq t0, GlobalProperty, .rGlobalVar
getGlobalObject(1)
- dispatch(6)
+ dispatch(7)
.rGlobalVar:
bineq t0, GlobalVar, .rClosureVar
getGlobalObject(1)
- dispatch(6)
+ dispatch(7)
.rClosureVar:
bineq t0, ClosureVar, .rGlobalPropertyWithVarInjectionChecks
resolveScope()
- dispatch(6)
+ dispatch(7)
.rGlobalPropertyWithVarInjectionChecks:
bineq t0, GlobalPropertyWithVarInjectionChecks, .rGlobalVarWithVarInjectionChecks
varInjectionCheck(.rDynamic)
getGlobalObject(1)
- dispatch(6)
+ dispatch(7)
.rGlobalVarWithVarInjectionChecks:
bineq t0, GlobalVarWithVarInjectionChecks, .rClosureVarWithVarInjectionChecks
varInjectionCheck(.rDynamic)
getGlobalObject(1)
- dispatch(6)
+ dispatch(7)
.rClosureVarWithVarInjectionChecks:
bineq t0, ClosureVarWithVarInjectionChecks, .rDynamic
varInjectionCheck(.rDynamic)
resolveScope()
- dispatch(6)
+ dispatch(7)
.rDynamic:
callSlowPath(_llint_slow_path_resolve_scope)
- dispatch(6)
+ dispatch(7)
macro loadWithStructureCheck(operand, slowPath)
loadisFromInstruction(operand, t0)
- loadp [cfr, t0, 8], t0
+ loadp PayloadOffset[cfr, t0, 8], t0
loadpFromInstruction(5, t1)
bpneq JSCell::m_structureID[t0], t1, slowPath
end
end
macro getClosureVar()
- loadp JSVariableObject::m_registers[t0], t0
loadisFromInstruction(6, t3)
- loadp TagOffset[t0, t3, 8], t1
- loadp PayloadOffset[t0, t3, 8], t2
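+ # Scope variables are now stored inline in the JSEnvironmentRecord rather than behind a separate registers pointer.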
+ loadp JSEnvironmentRecord_variables + TagOffset[t0, t3, 8], t1
+ loadp JSEnvironmentRecord_variables + PayloadOffset[t0, t3, 8], t2
valueProfile(t1, t2, 28, t0)
loadisFromInstruction(1, t0)
storei t1, TagOffset[cfr, t0, 8]
.gGlobalVarWithVarInjectionChecks:
bineq t0, GlobalVarWithVarInjectionChecks, .gClosureVarWithVarInjectionChecks
varInjectionCheck(.gDynamic)
- loadVariable(2, t2, t1, t0)
getGlobalVar()
dispatch(8)
loadisFromInstruction(3, t0)
loadConstantOrVariable(t0, t1, t2)
loadpFromInstruction(5, t3)
- notifyWrite(t3, t1, t2, t0, .pDynamic)
+ notifyWrite(t3, .pDynamic)
loadpFromInstruction(6, t0)
storei t1, TagOffset[t0]
storei t2, PayloadOffset[t0]
macro putClosureVar()
loadisFromInstruction(3, t1)
loadConstantOrVariable(t1, t2, t3)
- loadp JSVariableObject::m_registers[t0], t0
loadisFromInstruction(6, t1)
- storei t2, TagOffset[t0, t1, 8]
- storei t3, PayloadOffset[t0, t1, 8]
+ storei t2, JSEnvironmentRecord_variables + TagOffset[t0, t1, 8]
+ storei t3, JSEnvironmentRecord_variables + PayloadOffset[t0, t1, 8]
+end
+
+macro putLocalClosureVar()
+ loadisFromInstruction(3, t1)
+ loadConstantOrVariable(t1, t2, t3)
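+ # Operand 5 optionally holds a watchpoint set to notify on write; it may be null.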
+ loadpFromInstruction(5, t4)
+ btpz t4, .noVariableWatchpointSet
+ notifyWrite(t4, .pDynamic)
+.noVariableWatchpointSet:
+ loadisFromInstruction(6, t1)
+ storei t2, JSEnvironmentRecord_variables + TagOffset[t0, t1, 8]
+ storei t3, JSEnvironmentRecord_variables + PayloadOffset[t0, t1, 8]
end
loadisFromInstruction(4, t0)
andi ResolveModeMask, t0
-#pGlobalProperty:
+#pLocalClosureVar:
+ bineq t0, LocalClosureVar, .pGlobalProperty
+ writeBarrierOnOperands(1, 3)
+ loadVariable(1, t2, t1, t0)
+ putLocalClosureVar()
+ dispatch(7)
+
+.pGlobalProperty:
bineq t0, GlobalProperty, .pGlobalVar
writeBarrierOnOperands(1, 3)
loadWithStructureCheck(1, .pDynamic)
.pDynamic:
callSlowPath(_llint_slow_path_put_to_scope)
dispatch(7)
+
+
+_llint_op_get_from_arguments:
+ traceExecution()
+ loadisFromInstruction(2, t0)
+ loadi PayloadOffset[cfr, t0, 8], t0
+ loadi 12[PC], t1
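+ # Load the value from the DirectArguments storage at the given index, profile it, and store it to the destination.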
+ loadi DirectArguments_storage + TagOffset[t0, t1, 8], t2
+ loadi DirectArguments_storage + PayloadOffset[t0, t1, 8], t3
+ loadisFromInstruction(1, t1)
+ valueProfile(t2, t3, 16, t0)
+ storei t2, TagOffset[cfr, t1, 8]
+ storei t3, PayloadOffset[cfr, t1, 8]
+ dispatch(5)
+
+
+_llint_op_put_to_arguments:
+ traceExecution()
+ writeBarrierOnOperands(1, 3)
+ loadisFromInstruction(1, t0)
+ loadi PayloadOffset[cfr, t0, 8], t0
+ loadisFromInstruction(3, t1)
+ loadConstantOrVariable(t1, t2, t3)
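+ # Store the value into the DirectArguments storage at the given index.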
+ loadi 8[PC], t1
+ storei t2, DirectArguments_storage + TagOffset[t0, t1, 8]
+ storei t3, DirectArguments_storage + PayloadOffset[t0, t1, 8]
+ dispatch(4)
+
+
+_llint_op_profile_type:
+ traceExecution()
+ loadp CodeBlock[cfr], t1
+ loadp CodeBlock::m_vm[t1], t1
+ # t1 is holding the pointer to the typeProfilerLog.
+ loadp VM::m_typeProfilerLog[t1], t1
+
+ # t0 is holding the payload, t4 is holding the tag.
+ loadisFromInstruction(1, t2)
+ loadConstantOrVariable(t2, t4, t0)
+
+ # t2 is holding the pointer to the current log entry.
+ loadp TypeProfilerLog::m_currentLogEntryPtr[t1], t2
+
+ # Store the JSValue onto the log entry.
+ storei t4, TypeProfilerLog::LogEntry::value + TagOffset[t2]
+ storei t0, TypeProfilerLog::LogEntry::value + PayloadOffset[t2]
+
+ # Store the TypeLocation onto the log entry.
+ loadpFromInstruction(2, t3)
+ storep t3, TypeProfilerLog::LogEntry::location[t2]
+
+ bieq t4, CellTag, .opProfileTypeIsCell
+ storei 0, TypeProfilerLog::LogEntry::structureID[t2]
+ jmp .opProfileTypeSkipIsCell
+.opProfileTypeIsCell:
+ loadi JSCell::m_structureID[t0], t3
+ storei t3, TypeProfilerLog::LogEntry::structureID[t2]
+.opProfileTypeSkipIsCell:
+
+ # Increment the current log entry.
+ addp sizeof TypeProfilerLog::LogEntry, t2
+ storep t2, TypeProfilerLog::m_currentLogEntryPtr[t1]
+
+ loadp TypeProfilerLog::m_logEndPtr[t1], t1
+ bpneq t2, t1, .opProfileTypeDone
+ callSlowPath(_slow_path_profile_type_clear_log)
+
+.opProfileTypeDone:
+ dispatch(6)