/*
 * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "AccessorCallJITStubRoutine.h"
#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicGetByIdList.h"
#include "PolymorphicPutByIdList.h"
#include "RegExpMatchesArray.h"
#include "RepatchBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>
// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
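//
// Note: the likely reason (an assumption, not stated explicitly here) is that the stubs and
// patching code below can be reached from tiers, such as FTL-compiled code, that do not keep
// those constant registers pinned, so nothing in this file may rely on their usual contents.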

static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(repatchBuffer);
#endif // ENABLE(FTL_JIT)
    return result;
}

static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#endif // ENABLE(FTL_JIT)
    repatchBuffer.relink(call, newCalleeFunction);
}
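
// In the FTL case above, the call being repatched does not point directly at the slow path
// function; it points at a slow-path call thunk. To retarget it, the key for the currently
// installed thunk is recovered, rewritten with the new callee, and a thunk for the new key is
// linked in instead. A rough sketch of the flow, using the names above:
//
//     SlowPathCallKey key = thunks.keyForSlowPathCallThunk(currentTarget);
//     key = key.withCallTarget(newCallee);
//     relink(call, thunks.getSlowPathCallThunk(vm, key).code());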

static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchCall(repatchBuffer, call, newCalleeFunction);
}

static void repatchByIdSelfAccess(
    VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure,
    const Identifier& propertyName, PropertyOffset offset, const FunctionPtr &slowPathFunction,
    bool compact)
{
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
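
// Conceptually (a hedged sketch, not literal emitted code), the inline fast path rewritten by
// the calls above looks like:
//
//     if (base->structureID != <patched StructureID>)   // dataLabel32 at deltaCheckImmToCall
//         goto slow_path_call;                          // the call at callReturnLocation
//     storage = inline storage or base->butterfly       // convertible load at deltaCallToStorageLoad
//     result  = storage[<patched offset>]               // load/store at deltaCallToLoadOrStore
//
// Repatching therefore only rewrites the structure ID immediate, the load kind, and the
// displacement; the shape of the instruction sequence never changes.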

static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
        if (!ASSERT_DISABLED) {
            // If we execute this code, the object must have the structure we expect. Assert
            // this in debug modes.
            jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
            MacroAssembler::Jump ok = branchStructure(
                jit,
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
                structure);
            jit.abortWithReason(RepatchIneffectiveWatchpoint);
            ok.link(&jit);
        }
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        branchStructure(jit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}

static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

enum ByIdStubKind {
    GetValue,
    GetUndefined,
    CallGetter,
    CallCustomGetter,
    CallSetter,
    CallCustomSetter
};

static const char* toString(ByIdStubKind kind)
{
    switch (kind) {
    case GetValue:
        return "GetValue";
    case GetUndefined:
        return "GetUndefined";
    case CallGetter:
        return "CallGetter";
    case CallCustomGetter:
        return "CallCustomGetter";
    case CallSetter:
        return "CallSetter";
    case CallCustomSetter:
        return "CallCustomSetter";
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;
    }
}

static ByIdStubKind kindFor(const PropertySlot& slot)
{
    if (slot.isCacheableValue())
        return GetValue;
    if (slot.isUnset())
        return GetUndefined;
    if (slot.isCacheableCustom())
        return CallCustomGetter;
    RELEASE_ASSERT(slot.isCacheableGetter());
    return CallGetter;
}

static FunctionPtr customFor(const PropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customGetter());
}

static ByIdStubKind kindFor(const PutPropertySlot& slot)
{
    RELEASE_ASSERT(!slot.isCacheablePut());
    if (slot.isCacheableSetter())
        return CallSetter;
    RELEASE_ASSERT(slot.isCacheableCustom());
    return CallCustomSetter;
}

static FunctionPtr customFor(const PutPropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customSetter());
}
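
// For orientation (illustrative, not exhaustive): a plain data property that can be read
// directly maps to GetValue; a property a cacheable miss proves is absent maps to
// GetUndefined; a JS accessor such as `get length() { ... }` maps to CallGetter/CallSetter;
// and a host-implemented property backed by a C++ callback maps to CallCustomGetter or
// CallCustomSetter, which is why customFor() only returns a non-null FunctionPtr in the
// custom cases.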

static bool generateByIdStub(
    ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
    FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
    PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
    CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
    JSValueRegs valueRegs = JSValueRegs(
#if USE(JSVALUE32_64)
        static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
#endif
        static_cast<GPRReg>(stubInfo.patch.valueGPR));
    GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
    RELEASE_ASSERT(!needToRestoreScratch || (kind == GetValue || kind == GetUndefined));

    CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
    if (needToRestoreScratch) {
        scratchGPR = AssemblyHelpers::selectScratchGPR(
            baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
        stubJit.pushToSave(scratchGPR);
        needToRestoreScratch = true;
    }

    MacroAssembler::JumpList failureCases;

    GPRReg baseForGetGPR;
    if (loadTargetFromProxy) {
        baseForGetGPR = valueRegs.payloadGPR();
        failureCases.append(stubJit.branch8(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
            MacroAssembler::TrustedImm32(PureForwardingProxyType)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
    } else {
        baseForGetGPR = baseGPR;

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
            structure));
    }

    CodeBlock* codeBlock = exec->codeBlock();
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    if (watchpointSet)
        watchpointSet->add(stubInfo.addWatchpoint(codeBlock));

    Structure* currStructure = structure;
    JSObject* protoObject = 0;
    if (chain) {
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            protoObject = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = protoObject->structure();
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
            addStructureTransitionCheck(
                protoObject, protoStructure, codeBlock, stubInfo, stubJit,
                failureCases, scratchGPR);
            currStructure = it->get();
        }
        ASSERT(!protoObject || protoObject->structure() == currStructure);
    }

    currStructure->startWatchingPropertyForReplacements(*vm, offset);
    GPRReg baseForAccessGPR = InvalidGPRReg;
    if (kind != GetUndefined) {
        if (chain) {
            // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
            if (loadTargetFromProxy)
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
            stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
            baseForAccessGPR = scratchGPR;
        } else {
            // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
            // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR.
            if (loadTargetFromProxy)
                stubJit.move(scratchGPR, baseForGetGPR);
            baseForAccessGPR = baseForGetGPR;
        }
    }

    GPRReg loadedValueGPR = InvalidGPRReg;
    if (kind == GetUndefined)
        stubJit.moveTrustedValue(jsUndefined(), valueRegs);
    else if (kind != CallCustomGetter && kind != CallCustomSetter) {
        if (kind == GetValue)
            loadedValueGPR = valueRegs.payloadGPR();
        else
            loadedValueGPR = scratchGPR;

        GPRReg storageGPR;
        if (isInlineOffset(offset))
            storageGPR = baseForAccessGPR;
        else {
            stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
            storageGPR = loadedValueGPR;
        }

#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
#else
        if (kind == GetValue)
            stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
        stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
#endif
    }
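
    // Note on the two storage cases above (a hedged summary): properties at inline offsets
    // live directly inside the JSObject cell, so the base register itself addresses the
    // storage; out-of-line properties live in the separately allocated butterfly, so the stub
    // first loads JSObject::butterflyOffset() and then indexes it with
    // offsetRelativeToBase(offset). Either way the final access is a single
    // base-plus-displacement load.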

    // Stuff for custom getters.
    MacroAssembler::Call operationCall;
    MacroAssembler::Call handlerCall;

    // Stuff for JS getters.
    MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
    MacroAssembler::Call fastPathCall;
    MacroAssembler::Call slowPathCall;
    std::unique_ptr<CallLinkInfo> callLinkInfo;

    MacroAssembler::Jump success, fail;
    if (kind != GetValue && kind != GetUndefined) {
        // Need to make sure that whenever this call is made in the future, we remember the
        // place that we made it from. It just so happens to be the place that we are at
        // right now!
        stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

        if (kind == CallGetter || kind == CallSetter) {
            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it back after the call.

            callLinkInfo = std::make_unique<CallLinkInfo>();
            callLinkInfo->setUpCall(CallLinkInfo::Call, stubInfo.codeOrigin, loadedValueGPR);

            MacroAssembler::JumpList done;

            // There is a 'this' argument but nothing else.
            unsigned numberOfParameters = 1;
            // ... unless we're calling a setter.
            if (kind == CallSetter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (kind == CallSetter) {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }
            MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
                MacroAssembler::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall =
                JSStack::CallFrameHeaderSize + numberOfParameters;

            unsigned numberOfBytesForCall =
                numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            stubJit.subPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
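
            // Worked example of the arithmetic above (hedged; concrete numbers depend on the
            // target): on a 64-bit build, sizeof(Register) is 8 and sizeof(CallerFrameAndPC)
            // is two machine words (16 bytes). For a getter, numberOfParameters is 1, so
            // numberOfBytesForCall = (JSStack::CallFrameHeaderSize + 1) * 8 - 16, which is
            // then rounded up to stackAlignmentBytes() so SP stays aligned across the nested
            // call.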

            MacroAssembler::Address calleeFrame = MacroAssembler::Address(
                MacroAssembler::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            stubJit.store32(
                MacroAssembler::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(
                    JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));

            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));

            stubJit.storeCell(
                baseForGetGPR,
                calleeFrame.withOffset(
                    virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (kind == CallSetter) {
                stubJit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
                MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                MacroAssembler::TrustedImmPtr(0));

            fastPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            slowCase.link(&stubJit);

            stubJit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
            slowPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            returnUndefined.link(&stubJit);

            if (kind == CallGetter)
                stubJit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&stubJit);
        } else {
            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
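            //
            // For illustration only (a hypothetical host binding, not something defined in
            // this file), a custom getter matching the GetValueFunc shape above would be
            // declared as:
            //
            //     EncodedJSValue exampleLengthGetter(ExecState*, JSObject* slotBase,
            //         EncodedJSValue thisValue, PropertyName);
            //
            // It is that raw function pointer which arrives here as 'custom' and gets linked
            // in as 'operationCall' below.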
#if USE(JSVALUE64)
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
#else
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
#endif
            stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);

            operationCall = stubJit.call();
            if (kind == CallCustomGetter)
                stubJit.setupResults(valueRegs);
            MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
            handlerCall = stubJit.call();
            stubJit.jumpToExceptionHandler();

            noException.link(&stubJit);
        }
    }
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate())
        return false;

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    if (kind == CallCustomGetter || kind == CallCustomSetter) {
        patchBuffer.link(operationCall, custom);
        patchBuffer.link(handlerCall, lookupExceptionHandler);
    } else if (kind == CallGetter || kind == CallSetter) {
        callLinkInfo->setCallLocations(patchBuffer.locationOfNearCall(slowPathCall),
            patchBuffer.locationOf(addressOfLinkFunctionCheck),
            patchBuffer.locationOfNearCall(fastPathCall));

        ThunkGenerator generator = linkThunkGeneratorFor(
            CodeForCall, RegisterPreservationNotRequired);
        patchBuffer.link(
            slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
    }

    MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
        exec->codeBlock(), patchBuffer,
        ("%s access stub for %s, return point %p",
            toString(kind), toCString(*exec->codeBlock()).data(),
            successLabel.executableAddress()));

    if (kind == CallGetter || kind == CallSetter)
        stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
    else
        stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);

    return true;
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }
    ASSERT(!structure->isUncacheableDictionary());

    if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
        return GiveUpOnCache;

    return AttemptToCache;
}
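
// Note (a hedged aside): the flatten-and-retry path above exists because an uncacheable
// dictionary has no stable property layout to key a stub on. Flattening rebuilds the object
// with a normal structure, so a later IC attempt against the flattened object can succeed; an
// object that was flattened before and became a dictionary again is treated as churning and
// is not worth caching.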

static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if ((isJSArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

        MacroAssembler stubJit;

        if (isJSArray(baseValue)) {
            GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
            bool needToRestoreScratch = false;

            if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
#else
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
                stubJit.pushToSave(scratchGPR);
                needToRestoreScratch = true;
            }

            MacroAssembler::JumpList failureCases;

            stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
            failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));
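            // The signed LessThan-zero check above is how this stub rejects lengths that do
            // not fit in an int32: the butterfly stores the length as an unsigned 32-bit
            // value, so anything above INT32_MAX appears negative to a signed compare and
            // falls back to the slow path, which can box the result differently (e.g. as a
            // double).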

            stubJit.move(scratchGPR, resultGPR);
#if USE(JSVALUE64)
            stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
#endif

            MacroAssembler::Jump success, fail;

            emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

            LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
            if (patchBuffer.didFailToAllocate())
                return GiveUpOnCache;

            linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

            stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
                exec->codeBlock(), patchBuffer,
                ("GetById array length stub for %s, return point %p",
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress()));

            RepatchBuffer repatchBuffer(codeBlock);
            replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

            return RetryCacheLater;
        }

        // String.length case
        MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));

        stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);

#if USE(JSVALUE64)
        stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(AssemblyHelpers::TrustedImm32(JSValue::Int32Tag), resultTagGPR);
#endif

        MacroAssembler::Jump success = stubJit.jump();

        LinkBuffer patchBuffer(*vm, stubJit, codeBlock, JITCompilationCanFail);
        if (patchBuffer.didFailToAllocate())
            return GiveUpOnCache;

        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
        patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("GetById string length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

        return RetryCacheLater;
    }

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheable() && !slot.isUnset())
        return GiveUpOnCache;

    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure(*vm);

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    // Optimize self access.
    if (slot.isCacheableValue()
        && slot.slotBase() == baseValue
        && !slot.watchpointSet()
        && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
        structure->startWatchingPropertyForReplacements(*vm, slot.cachedOffset());
        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }

    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
    return RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
{
    RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
    RepatchBuffer repatchBuffer(codeBlock);
    if (stubInfo.u.getByIdList.list->didSelfPatching()) {
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(stubRoutine->code().code()));
        return;
    }

    replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
}

static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || (!slot.isCacheable() && !slot.isUnset()))
        return GiveUpOnCache;

    JSCell* baseCell = baseValue.asCell();
    bool loadTargetFromProxy = false;
    if (baseCell->type() == PureForwardingProxyType) {
        baseValue = jsCast<JSProxy*>(baseCell)->target();
        baseCell = baseValue.asCell();
        loadTargetFromProxy = true;
    }

    VM* vm = &exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    Structure* structure = baseCell->structure(*vm);
    TypeInfo typeInfo = structure->typeInfo();

    if (stubInfo.patch.spillMode == NeedToSpill) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (!slot.isCacheableValue() && !slot.isUnset())
            return GiveUpOnCache;
    }

    PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();
    StructureChain* prototypeChain = 0;
    size_t count = 0;

    if (slot.isUnset() || slot.slotBase() != baseValue) {
        if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
            return GiveUpOnCache;

        if (slot.isUnset())
            count = normalizePrototypeChain(exec, structure);
        else
            count = normalizePrototypeChainForChainAccess(
                exec, structure, slot.slotBase(), ident, offset);
        if (count == InvalidPrototypeChain)
            return GiveUpOnCache;
        prototypeChain = structure->prototypeChain(exec);
    }

    PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
    if (list->isFull()) {
        // We need this extra check because of recursion.
        return GiveUpOnCache;
    }

    RefPtr<JITStubRoutine> stubRoutine;
    bool result = generateByIdStub(
        exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
        structure, loadTargetFromProxy, slot.watchpointSet(),
        stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
        CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);
    if (!result)
        return GiveUpOnCache;

    GetByIdAccess::AccessType accessType;
    if (slot.isCacheableValue())
        accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
    else if (slot.isUnset())
        accessType = GetByIdAccess::SimpleMiss;
    else if (slot.isCacheableGetter())
        accessType = GetByIdAccess::Getter;
    else
        accessType = GetByIdAccess::CustomGetter;

    list->addAccess(GetByIdAccess(
        *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
        prototypeChain, count));

    patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());

    return list->isFull() ? GiveUpOnCache : RetryCacheLater;
}

void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

static bool emitPutReplaceStub(
    ExecState* exec,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();

    CCallHelpers stubJit(vm, exec->codeBlock());

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::Jump badStructure = branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        structure);

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate())
        return false;

    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = FINALIZE_CODE_FOR_STUB(
        exec->codeBlock(), patchBuffer,
        ("PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.deltaCallToDone).executableAddress()));

    return true;
}

static Structure* emitPutTransitionStubAndGetOldStructure(ExecState* exec, VM* vm, Structure*& structure, const Identifier& ident,
    const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    PropertyName pname(ident);
    Structure* oldStructure = structure;
    if (!oldStructure->isObject() || oldStructure->isDictionary() || parseIndex(pname))
        return nullptr;

    PropertyOffset propertyOffset;
    structure = Structure::addPropertyTransitionToExistingStructureConcurrently(oldStructure, ident.impl(), 0, propertyOffset);

    if (!structure || !structure->isObject() || structure->isDictionary() || !structure->propertyAccessesAreCacheable())
        return nullptr;

    // Skip optimizing the case where we need a realloc, if we don't have
    // enough registers to make it happen.
    if (GPRInfo::numberOfRegisters < 6
        && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        return nullptr;
    }

    // Skip optimizing the case where we need realloc, and the structure has
    // indexing storage.
    // FIXME: We shouldn't skip this! Implement it!
    // https://bugs.webkit.org/show_bug.cgi?id=130914
    if (oldStructure->couldHaveIndexingHeader())
        return nullptr;

    if (normalizePrototypeChain(exec, structure) == InvalidPrototypeChain)
        return nullptr;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    // emitPutTransitionStub

    CodeLocationLabel failureLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase);
    RefPtr<JITStubRoutine>& stubRoutine = stubInfo.stubRoutine;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    CCallHelpers stubJit(vm);

    bool needThirdScratch = false;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needThirdScratch = true;
    }

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);

    GPRReg scratchGPR2 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR2 != baseGPR);
    ASSERT(scratchGPR2 != valueGPR);
    ASSERT(scratchGPR2 != scratchGPR1);

    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;

    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());

    failureCases.append(branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        oldStructure));

    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

    MacroAssembler::JumpList slowPath;

    bool scratchGPR1HasStorage = false;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();

        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }
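
        // (Hedged note on the offsets above: out-of-line property storage sits at negative
        // offsets from the butterfly pointer, so the copy indexes downwards from each base;
        // the extra sizeof(JSValue)/sizeof(void*) adjustments account for where the
        // allocator's end pointer and the first property slot sit relative to that base.)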

        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
    ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
    ASSERT(oldStructure->indexingType() == structure->indexingType());

#if USE(JSVALUE64)
    uint32_t val = structure->id();
#else
    uint32_t val = reinterpret_cast<uint32_t>(structure->id());
#endif
    stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    ScratchBuffer* scratchBuffer = nullptr;

    MacroAssembler::Call callFlushWriteBarrierBuffer;
    MacroAssembler::Jump ownerIsRememberedOrInEden = stubJit.jumpIfIsRememberedOrInEden(baseGPR);

    WriteBarrierBuffer& writeBarrierBuffer = stubJit.vm()->heap.writeBarrierBuffer();
    stubJit.load32(writeBarrierBuffer.currentIndexAddress(), scratchGPR2);
    MacroAssembler::Jump needToFlush =
        stubJit.branch32(MacroAssembler::AboveOrEqual, scratchGPR2, MacroAssembler::TrustedImm32(writeBarrierBuffer.capacity()));

    stubJit.add32(MacroAssembler::TrustedImm32(1), scratchGPR2);
    stubJit.store32(scratchGPR2, writeBarrierBuffer.currentIndexAddress());

    stubJit.move(MacroAssembler::TrustedImmPtr(writeBarrierBuffer.buffer()), scratchGPR1);
    // We use an offset of -sizeof(void*) because we already added 1 to scratchGPR2.
    stubJit.storePtr(baseGPR, MacroAssembler::BaseIndex(scratchGPR1, scratchGPR2, MacroAssembler::ScalePtr, static_cast<int32_t>(-sizeof(void*))));

    MacroAssembler::Jump doneWithBarrier = stubJit.jump();
    needToFlush.link(&stubJit);

    scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
    allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);
    stubJit.setupArgumentsWithExecState(baseGPR);
    callFlushWriteBarrierBuffer = stubJit.call();
    allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR2);

    doneWithBarrier.link(&stubJit);
    ownerIsRememberedOrInEden.link(&stubJit);
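
    // Write-barrier shape (a brief, hedged summary): if the store's owner is not already
    // remembered (and not in Eden), the stub appends baseGPR to the VM's WriteBarrierBuffer
    // inline; only when that buffer is full does it spill registers and call out through
    // callFlushWriteBarrierBuffer, which is linked to the flush operation below.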

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);

        allocator.restoreReusedRegistersByPopping(stubJit);
        if (!scratchBuffer)
            scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
        allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate())
        return nullptr;

    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    patchBuffer.link(callFlushWriteBarrierBuffer, operationFlushWriteBarrierBuffer);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    }

    stubRoutine =
        createJITStubRoutine(
            FINALIZE_CODE_FOR(
                exec->codeBlock(), patchBuffer,
                ("PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress())),
            *vm,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);

    return oldStructure;
}

static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::NewProperty) {

            Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, ident, slot, stubInfo, putKind);
            if (!oldStructure)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.deltaCallToJump),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return RetryCacheLater;
        }

        if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
            return GiveUpOnCache;

        structure->didCachePropertyReplacement(*vm, slot.cachedOffset());
        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }

    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;

        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), ident, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;
            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list;
        list = PolymorphicPutByIdList::from(putKind, stubInfo);

        bool result = generateByIdStub(
            exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
            stubRoutine);
        if (!result)
            return GiveUpOnCache;

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, count, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
        RELEASE_ASSERT(!list->isFull());
        return RetryCacheLater;
    }

    return GiveUpOnCache;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            Structure* oldStructure = emitPutTransitionStubAndGetOldStructure(exec, vm, structure, propertyName, slot, stubInfo, putKind);
            if (!oldStructure)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);
            stubRoutine = stubInfo.stubRoutine;
            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            structure->didCachePropertyReplacement(*vm, slot.cachedOffset());

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            bool result = emitPutReplaceStub(
                exec, propertyName, slot, stubInfo,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
            if (!result)
                return GiveUpOnCache;

            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }

    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;
        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, structure, slot.base(), propertyName, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;
            prototypeChain = structure->prototypeChain(exec);
        }

        PolymorphicPutByIdList* list;
        list = PolymorphicPutByIdList::from(putKind, stubInfo);

        bool result = generateByIdStub(
            exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            CodeLocationLabel(list->currentSlowPathTarget()),
            stubRoutine);
        if (!result)
            return GiveUpOnCache;

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, count, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }
    return GiveUpOnCache;
}
void buildPutByIdList(
    ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName,
    const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
    
    if (tryBuildPutByIdList(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

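// tryRepatchIn() caches an 'in' check. The generated stub verifies the base's structure and, if
// the property was found on a prototype, the structures along the prototype chain, then loads the
// boolean answer directly into the result register. Each structure gets its own entry in a
// PolymorphicAccessStructureList; once POLYMORPHIC_LIST_CACHE_SIZE entries exist we give up and
// fall back to operationIn.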
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;
    
    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;
    
    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }
    
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    Structure* structure = base->structure(*vm);
    
    PropertyOffset offsetIgnored;
    JSValue foundSlotBase = wasFound ? slot.slotBase() : JSValue();
    size_t count = !foundSlotBase || foundSlotBase != base ?
        normalizePrototypeChainForChainAccess(exec, structure, foundSlotBase, ident, offsetIgnored) : 0;
    if (count == InvalidPrototypeChain)
        return GiveUpOnCache;
    
    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;
    
    CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
    CodeLocationLabel slowCaseLabel;
    
    if (stubInfo.accessType == access_unset) {
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initInList(polymorphicStructureList, 0);
        slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
            stubInfo.patch.deltaCallToSlowCase);
        listIndex = 0;
    } else {
        RELEASE_ASSERT(stubInfo.accessType == access_in_list);
        polymorphicStructureList = stubInfo.u.inList.structureList;
        listIndex = stubInfo.u.inList.listSize;
        slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        
        if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
            return GiveUpOnCache;
    }
    
    StructureChain* chain = structure->prototypeChain(exec);
    RefPtr<JITStubRoutine> stubRoutine;
    
    {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
        GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
        
        CCallHelpers stubJit(vm);
        
        bool needToRestoreScratch;
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        } else
            needToRestoreScratch = false;
        
        MacroAssembler::JumpList failureCases;
        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
            structure));

        CodeBlock* codeBlock = exec->codeBlock();
        if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
            vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));

        if (slot.watchpointSet())
            slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));

        Structure* currStructure = structure;
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = prototype->structure();
            addStructureTransitionCheck(
                prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
                failureCases, scratchGPR);
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
            currStructure = it->get();
        }
        
#if USE(JSVALUE64)
        stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
#else
        stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
#endif
        
        MacroAssembler::Jump success, fail;
        
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        
        LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock(), JITCompilationCanFail);
        if (patchBuffer.didFailToAllocate())
            return GiveUpOnCache;
        
        linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
        
        stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("In (found = %s) stub for %s, return point %p",
                wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
                successLabel.executableAddress()));
    }
    
    polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
    stubInfo.u.inList.listSize++;
    
    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
    
    return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

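// The linkSlowFor() helpers point a call IC's slow-path call at the appropriate thunk for the
// call's specialization kind and register preservation mode; linkFor() performs the fast-path
// link to a known callee and records it in the CallLinkInfo.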
static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    repatchBuffer.relink(
        callLinkInfo.callReturnLocation(), vm->getCTIStub(generator).code());
}

static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    linkSlowFor(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
}

void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub());
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin(), callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    repatchBuffer.relink(callLinkInfo.hotPathOther(), codePtr);
    
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
    
    if (kind == CodeForCall) {
        linkSlowFor(
            repatchBuffer, vm, callLinkInfo, linkPolymorphicCallThunkGeneratorFor(registers));
        return;
    }
    
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
}

static void revertCall(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, generator);
    callLinkInfo.clearSeen();
    callLinkInfo.clearCallee();
    callLinkInfo.clearStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(
    RepatchBuffer& repatchBuffer, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    if (Options::showDisassembly())
        dataLog("Unlinking call from ", callLinkInfo.callReturnLocation(), " in request from ", pointerDump(repatchBuffer.codeBlock()), "\n");
    
    revertCall(
        repatchBuffer, repatchBuffer.codeBlock()->vm(), callLinkInfo,
        linkThunkGeneratorFor(kind, registers));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    // FIXME: We could generate a virtual call stub here. This would lead to faster virtual calls
    // by eliminating the branch prediction bottleneck inside the shared virtual call thunk.
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    revertCall(repatchBuffer, vm, callLinkInfo, virtualThunkGeneratorFor(kind, registers));
}

namespace {

struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};

} // anonymous namespace

void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant,
    RegisterPreservationMode registers)
{
    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
        return;
    }
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };
    
    list = variantListWithVariant(list, newVariant);
    
    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }
    
    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();
    
    Vector<PolymorphicCallCase> callCases;
    
    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            codeBlock = jsCast<FunctionExecutable*>(variant.executable())->codeBlockForCall();
            
            // If we cannot handle a callee, assume that it's better for this whole thing to be a
            // virtual call.
            if (exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.callType() == CallLinkInfo::CallVarargs || callLinkInfo.callType() == CallLinkInfo::ConstructVarargs) {
                linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
                return;
            }
        }
        
        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }
    
    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
        return;
    }
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());
    
    CCallHelpers stubJit(vm, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
    
    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
    
    if (!ASSERT_DISABLED) {
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }
    
    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    GPRReg comparisonValueGPR;
    
    if (isClosureCall) {
        // Verify that we have a function and stash the executable in scratch.
        
#if USE(JSVALUE64)
        // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
        // being set. So we do this the hard way.
        stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
        // We would have already checked that the callee is a cell.
#endif
        
        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));
        
        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratch);
        
        comparisonValueGPR = scratch;
    } else
        comparisonValueGPR = calleeGPR;
    
    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;
    
    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());
    
    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;
        
        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());
        
        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;
                
                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }
        
        caseValues[i] = newCaseValue;
    }
    
    GPRReg fastCountsBaseGPR =
        AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();
        
        CallVariant variant = callCases[caseIndex].variant();
        
        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(
                *vm, variant.executable(), ArityCheckNotRequired, registers);
        
        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }
    
    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);
    
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo, CodeForCall, registers);
        return;
    }
    
    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGeneratorFor(registers)).code()));
    
    RefPtr<PolymorphicCallStubRoutine> stubRoutine = adoptRef(new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock->ownerExecutable(), exec->callerFrame(), callLinkInfo, callCases,
        WTF::move(fastCounts)));
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // This is weird. The original slow path should no longer be reachable.
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
    
    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(stubRoutine.release());
    
    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

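// The reset functions below return an IC to its unoptimized state: the slow-path call is pointed
// back at the corresponding *Optimize operation (where one exists), the inline structure check and
// the cached load/store offsets are blanked out, and the IC's patchable jump is relinked to the
// slow case.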
void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}