/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)
#include "AccessorCallJITStubRoutine.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicGetByIdList.h"
#include "PolymorphicPutByIdList.h"
#include "RegExpMatchesArray.h"
#include "RepatchBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {
// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(repatchBuffer);
#endif // ENABLE(FTL_JIT)
    return result;
}
static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#endif // ENABLE(FTL_JIT)
    repatchBuffer.relink(call, newCalleeFunction);
}

static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchCall(repatchBuffer, call, newCalleeFunction);
}
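// Patches a self-access inline cache in place: the structure-check immediate, the convertible
// storage load, and the load/store offset baked into the JIT code, then repoints the slow-path
// call at slowPathFunction so we only optimize once.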
static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
    const FunctionPtr &slowPathFunction, bool compact)
{
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
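// Emits a check that the given object still has the expected structure. If the structure's
// transition watchpoint set is still valid we register a watchpoint instead of emitting a runtime
// check (keeping only an assertion-only check in debug builds); otherwise we append a structure
// comparison to failureCases.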
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
        if (!ASSERT_DISABLED) {
            // If we execute this code, the object must have the structure we expect. Assert
            // this in debug modes.
            jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
            MacroAssembler::Jump ok = branchStructure(
                jit,
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
                structure);
            jit.abortWithReason(RepatchIneffectiveWatchpoint);
            ok.link(&jit);
        }
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        branchStructure(jit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
}
static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
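// Classification of the kinds of by-id stubs we can generate: plain value loads, calls to JS
// getters/setters, and calls to custom C accessor functions.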
enum ByIdStubKind {
    GetValue,
    CallGetter,
    CallCustomGetter,
    CallSetter,
    CallCustomSetter
};

static const char* toString(ByIdStubKind kind)
{
    switch (kind) {
    case GetValue:
        return "GetValue";
    case CallGetter:
        return "CallGetter";
    case CallCustomGetter:
        return "CallCustomGetter";
    case CallSetter:
        return "CallSetter";
    case CallCustomSetter:
        return "CallCustomSetter";
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;
    }
}
static ByIdStubKind kindFor(const PropertySlot& slot)
{
    if (slot.isCacheableValue())
        return GetValue;
    if (slot.isCacheableCustom())
        return CallCustomGetter;
    RELEASE_ASSERT(slot.isCacheableGetter());
    return CallGetter;
}

static FunctionPtr customFor(const PropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customGetter());
}

static ByIdStubKind kindFor(const PutPropertySlot& slot)
{
    RELEASE_ASSERT(!slot.isCacheablePut());
    if (slot.isCacheableSetter())
        return CallSetter;
    RELEASE_ASSERT(slot.isCacheableCustom());
    return CallCustomSetter;
}

static FunctionPtr customFor(const PutPropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customSetter());
}
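// Generates a single get/put-by-id access stub: checks the receiver's structure (optionally
// through a JSProxy target), walks and checks the prototype chain if one is supplied, then either
// loads the cached property directly or sets up and performs a call to a JS accessor or a custom
// C function.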
static void generateByIdStub(
    ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
    FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
    PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
    CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
    JSValueRegs valueRegs = JSValueRegs(
#if USE(JSVALUE32_64)
        static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
#endif
        static_cast<GPRReg>(stubInfo.patch.valueGPR));
    GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
    RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);

    CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
    if (needToRestoreScratch) {
        scratchGPR = AssemblyHelpers::selectScratchGPR(
            baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
        stubJit.pushToSave(scratchGPR);
        needToRestoreScratch = true;
    }
    MacroAssembler::JumpList failureCases;

    GPRReg baseForGetGPR;
    if (loadTargetFromProxy) {
        baseForGetGPR = valueRegs.payloadGPR();
        failureCases.append(stubJit.branch8(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
            MacroAssembler::TrustedImm32(PureForwardingProxyType)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
    } else {
        baseForGetGPR = baseGPR;

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
            structure));
    }

    CodeBlock* codeBlock = exec->codeBlock();
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    if (watchpointSet)
        watchpointSet->add(stubInfo.addWatchpoint(codeBlock));

    Structure* currStructure = structure;
    JSObject* protoObject = 0;
    if (chain) {
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            protoObject = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = protoObject->structure();
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
            addStructureTransitionCheck(
                protoObject, protoStructure, codeBlock, stubInfo, stubJit,
                failureCases, scratchGPR);
            currStructure = it->get();
        }
    }

    GPRReg baseForAccessGPR;
    if (chain) {
        // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
        if (loadTargetFromProxy)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        baseForAccessGPR = scratchGPR;
    } else {
        // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
        // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
        // on the slow path.
        if (loadTargetFromProxy)
            stubJit.move(scratchGPR, baseForGetGPR);
        baseForAccessGPR = baseForGetGPR;
    }
    GPRReg loadedValueGPR = InvalidGPRReg;
    if (kind != CallCustomGetter && kind != CallCustomSetter) {
        if (kind == GetValue)
            loadedValueGPR = valueRegs.payloadGPR();
        else
            loadedValueGPR = scratchGPR;

        GPRReg storageGPR;
        if (isInlineOffset(offset))
            storageGPR = baseForAccessGPR;
        else {
            stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
            storageGPR = loadedValueGPR;
        }

#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
#else
        if (kind == GetValue)
            stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
        stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
#endif
    }
    // Stuff for custom getters.
    MacroAssembler::Call operationCall;
    MacroAssembler::Call handlerCall;

    // Stuff for JS getters.
    MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
    MacroAssembler::Call fastPathCall;
    MacroAssembler::Call slowPathCall;
    std::unique_ptr<CallLinkInfo> callLinkInfo;

    MacroAssembler::Jump success, fail;
    if (kind != GetValue) {
        // Need to make sure that whenever this call is made in the future, we remember the
        // place that we made it from. It just so happens to be the place that we are at
        // right now!
        stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

        if (kind == CallGetter || kind == CallSetter) {
            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            callLinkInfo = std::make_unique<CallLinkInfo>();
            callLinkInfo->callType = CallLinkInfo::Call;
            callLinkInfo->codeOrigin = stubInfo.codeOrigin;
            callLinkInfo->calleeGPR = loadedValueGPR;

            MacroAssembler::JumpList done;

            // There is a 'this' argument but nothing else.
            unsigned numberOfParameters = 1;
            // ... unless we're calling a setter.
            if (kind == CallSetter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (kind == CallSetter) {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }
            MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
                MacroAssembler::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall =
                JSStack::CallFrameHeaderSize + numberOfParameters;

            unsigned numberOfBytesForCall =
                numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

            stubJit.subPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);

            MacroAssembler::Address calleeFrame = MacroAssembler::Address(
                MacroAssembler::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            stubJit.store32(
                MacroAssembler::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(
                    JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));

            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));

            stubJit.storeCell(
                baseForGetGPR,
                calleeFrame.withOffset(
                    virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (kind == CallSetter) {
                stubJit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

            MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
                MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                MacroAssembler::TrustedImmPtr(0));

            // loadedValueGPR is already burned. We can reuse it. From here on we assume that
            // any volatile register will be clobbered anyway.
            stubJit.loadPtr(
                MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
                loadedValueGPR);
            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
            fastPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            slowCase.link(&stubJit);

            stubJit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
            slowPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            returnUndefined.link(&stubJit);

            if (kind == CallGetter)
                stubJit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&stubJit);
        } else {
            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
#if USE(JSVALUE64)
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
#else
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
#endif
            stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);

            operationCall = stubJit.call();
            if (kind == CallCustomGetter)
                stubJit.setupResults(valueRegs);
            MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
            handlerCall = stubJit.call();
            stubJit.jumpToExceptionHandler();

            noException.link(&stubJit);
        }
    }
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    if (kind == CallCustomGetter || kind == CallCustomSetter) {
        patchBuffer.link(operationCall, custom);
        patchBuffer.link(handlerCall, lookupExceptionHandler);
    } else if (kind == CallGetter || kind == CallSetter) {
        callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
        callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
        callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);

        ThunkGenerator generator = linkThunkGeneratorFor(
            CodeForCall, RegisterPreservationNotRequired);
        patchBuffer.link(
            slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
    }

    MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
        exec->codeBlock(), patchBuffer,
        ("%s access stub for %s, return point %p",
            toString(kind), toCString(*exec->codeBlock()).data(),
            successLabel.executableAddress()));

    if (kind == CallGetter || kind == CallSetter)
        stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
    else
        stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
}
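// What the caller should do with an inline cache after an attempted repatch: GiveUpOnCache means
// the call site should be patched to the generic operation, RetryCacheLater means leave it alone
// so we can try again on a future execution, and AttemptToCache is returned by actionForCell when
// the cell looks cacheable.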
enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }
    ASSERT(!structure->isUncacheableDictionary());

    if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
        return GiveUpOnCache;

    return AttemptToCache;
}
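// Tries to cache a get_by_id: special-cases array (including RegExp matches arrays) and string
// length accesses with hand-rolled stubs, patches simple self accesses in place, and otherwise
// redirects the slow path to operationGetByIdBuildList so a polymorphic list can be built on a
// later miss.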
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    if ((isJSArray(baseValue) || isRegExpMatchesArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

        MacroAssembler stubJit;

        if (isJSArray(baseValue) || isRegExpMatchesArray(baseValue)) {
            GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
            bool needToRestoreScratch = false;

            if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
#else
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
                stubJit.pushToSave(scratchGPR);
                needToRestoreScratch = true;
            }

            MacroAssembler::JumpList failureCases;

            stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
            failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

            stubJit.move(scratchGPR, resultGPR);
#if USE(JSVALUE64)
            stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

            MacroAssembler::Jump success, fail;

            emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

            LinkBuffer patchBuffer(*vm, stubJit, codeBlock);

            linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

            stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
                exec->codeBlock(), patchBuffer,
                ("GetById array length stub for %s, return point %p",
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress()));

            RepatchBuffer repatchBuffer(codeBlock);
            replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

            return RetryCacheLater;
        }
        // String.length case
        MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));

        stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);

#if USE(JSVALUE64)
        stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success = stubJit.jump();

        LinkBuffer patchBuffer(*vm, stubJit, codeBlock);

        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
        patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("GetById string length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

        return RetryCacheLater;
    }
    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return GiveUpOnCache;

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    // Optimize self access.
    if (slot.slotBase() == baseValue
        && slot.isCacheableValue()
        && !slot.watchpointSet()
        && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }

    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
    return RetryCacheLater;
}
void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
{
    RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
    RepatchBuffer repatchBuffer(codeBlock);
    if (stubInfo.u.getByIdList.list->didSelfPatching()) {
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(stubRoutine->code().code()));
        return;
    }

    replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
}
static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable())
        return GiveUpOnCache;

    JSCell* baseCell = baseValue.asCell();
    bool loadTargetFromProxy = false;
    if (baseCell->type() == PureForwardingProxyType) {
        baseValue = jsCast<JSProxy*>(baseCell)->target();
        baseCell = baseValue.asCell();
        loadTargetFromProxy = true;
    }

    VM* vm = &exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    Structure* structure = baseCell->structure(*vm);
    TypeInfo typeInfo = structure->typeInfo();

    if (stubInfo.patch.spillMode == NeedToSpill) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (!slot.isCacheableValue())
            return GiveUpOnCache;
    }

    PropertyOffset offset = slot.cachedOffset();
    StructureChain* prototypeChain = 0;
    size_t count = 0;

    if (slot.slotBase() != baseValue) {
        if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
            return GiveUpOnCache;

        count = normalizePrototypeChainForChainAccess(
            exec, baseValue, slot.slotBase(), ident, offset);
        if (count == InvalidPrototypeChain)
            return GiveUpOnCache;
        prototypeChain = structure->prototypeChain(exec);
    }

    PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
    if (list->isFull()) {
        // We need this extra check because of recursion.
        return GiveUpOnCache;
    }

    RefPtr<JITStubRoutine> stubRoutine;
    generateByIdStub(
        exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
        structure, loadTargetFromProxy, slot.watchpointSet(),
        stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
        CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);

    GetByIdAccess::AccessType accessType;
    if (slot.isCacheableValue())
        accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
    else if (slot.isCacheableGetter())
        accessType = GetByIdAccess::Getter;
    else
        accessType = GetByIdAccess::CustomGetter;

    list->addAccess(GetByIdAccess(
        *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
        prototypeChain, count));

    patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());

    return list->isFull() ? GiveUpOnCache : RetryCacheLater;
}
void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}
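// Emits a stub for a put that replaces an existing property: a single structure check followed by
// a store to inline or out-of-line (butterfly) storage.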
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();

    CCallHelpers stubJit(vm, exec->codeBlock());

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::Jump badStructure = branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        structure);

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = FINALIZE_CODE_FOR_STUB(
        exec->codeBlock(), patchBuffer,
        ("PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.deltaCallToDone).executableAddress()));
}
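// Emits a stub for a put that adds a property and transitions the structure: checks the old
// structure and (for non-direct puts) the prototype chain, reallocates out-of-line storage if the
// capacity grew (falling back to operationReallocateStorageAndFinishPut when the inline allocation
// fails), stores the new structure ID, and finally stores the value.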
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    CCallHelpers stubJit(vm);

    bool needThirdScratch = false;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needThirdScratch = true;
    }

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);

    GPRReg scratchGPR2 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR2 != baseGPR);
    ASSERT(scratchGPR2 != valueGPR);
    ASSERT(scratchGPR2 != scratchGPR1);

    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;

    allocator.preserveReusedRegistersByPushing(stubJit);
    MacroAssembler::JumpList failureCases;

    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());

    failureCases.append(branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        oldStructure));

    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

    MacroAssembler::JumpList slowPath;

    bool scratchGPR1HasStorage = false;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();

        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }

        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }
->typeInfo().type() == structure
->typeInfo().type());
1083 ASSERT(oldStructure
->typeInfo().inlineTypeFlags() == structure
->typeInfo().inlineTypeFlags());
1084 ASSERT(oldStructure
->indexingType() == structure
->indexingType());
1086 uint32_t val
= structure
->id();
1088 uint32_t val
= reinterpret_cast<uint32_t>(structure
->id());
1090 stubJit
.store32(MacroAssembler::TrustedImm32(val
), MacroAssembler::Address(baseGPR
, JSCell::structureIDOffset()));
1092 if (isInlineOffset(slot
.cachedOffset()))
1093 stubJit
.store64(valueGPR
, MacroAssembler::Address(baseGPR
, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot
.cachedOffset()) * sizeof(JSValue
)));
1095 if (!scratchGPR1HasStorage
)
1096 stubJit
.loadPtr(MacroAssembler::Address(baseGPR
, JSObject::butterflyOffset()), scratchGPR1
);
1097 stubJit
.store64(valueGPR
, MacroAssembler::Address(scratchGPR1
, offsetInButterfly(slot
.cachedOffset()) * sizeof(JSValue
)));
1099 #elif USE(JSVALUE32_64)
1100 if (isInlineOffset(slot
.cachedOffset())) {
1101 stubJit
.store32(valueGPR
, MacroAssembler::Address(baseGPR
, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot
.cachedOffset()) * sizeof(JSValue
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.payload
)));
1102 stubJit
.store32(valueTagGPR
, MacroAssembler::Address(baseGPR
, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot
.cachedOffset()) * sizeof(JSValue
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.tag
)));
1104 if (!scratchGPR1HasStorage
)
1105 stubJit
.loadPtr(MacroAssembler::Address(baseGPR
, JSObject::butterflyOffset()), scratchGPR1
);
1106 stubJit
.store32(valueGPR
, MacroAssembler::Address(scratchGPR1
, offsetInButterfly(slot
.cachedOffset()) * sizeof(JSValue
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.payload
)));
1107 stubJit
.store32(valueTagGPR
, MacroAssembler::Address(scratchGPR1
, offsetInButterfly(slot
.cachedOffset()) * sizeof(JSValue
) + OBJECT_OFFSETOF(EncodedValueDescriptor
, asBits
.tag
)));
1111 MacroAssembler::Jump success
;
1112 MacroAssembler::Jump failure
;
1114 if (allocator
.didReuseRegisters()) {
1115 allocator
.restoreReusedRegistersByPopping(stubJit
);
1116 success
= stubJit
.jump();
1118 failureCases
.link(&stubJit
);
1119 allocator
.restoreReusedRegistersByPopping(stubJit
);
1120 failure
= stubJit
.jump();
1122 success
= stubJit
.jump();
1124 MacroAssembler::Call operationCall
;
1125 MacroAssembler::Jump successInSlowPath
;
1127 if (structure
->outOfLineCapacity() != oldStructure
->outOfLineCapacity()) {
1128 slowPath
.link(&stubJit
);
1130 allocator
.restoreReusedRegistersByPopping(stubJit
);
1131 ScratchBuffer
* scratchBuffer
= vm
->scratchBufferForSize(allocator
.desiredScratchBufferSizeForCall());
1132 allocator
.preserveUsedRegistersToScratchBufferForCall(stubJit
, scratchBuffer
, scratchGPR1
);
1134 stubJit
.setupArgumentsWithExecState(baseGPR
, MacroAssembler::TrustedImmPtr(structure
), MacroAssembler::TrustedImm32(slot
.cachedOffset()), valueGPR
);
1136 stubJit
.setupArgumentsWithExecState(baseGPR
, MacroAssembler::TrustedImmPtr(structure
), MacroAssembler::TrustedImm32(slot
.cachedOffset()), valueGPR
, valueTagGPR
);
1138 operationCall
= stubJit
.call();
1139 allocator
.restoreUsedRegistersFromScratchBufferForCall(stubJit
, scratchBuffer
, scratchGPR1
);
1140 successInSlowPath
= stubJit
.jump();
1143 LinkBuffer
patchBuffer(*vm
, stubJit
, exec
->codeBlock());
1144 patchBuffer
.link(success
, stubInfo
.callReturnLocation
.labelAtOffset(stubInfo
.patch
.deltaCallToDone
));
1145 if (allocator
.didReuseRegisters())
1146 patchBuffer
.link(failure
, failureLabel
);
1148 patchBuffer
.link(failureCases
, failureLabel
);
1149 if (structure
->outOfLineCapacity() != oldStructure
->outOfLineCapacity()) {
1150 patchBuffer
.link(operationCall
, operationReallocateStorageAndFinishPut
);
1151 patchBuffer
.link(successInSlowPath
, stubInfo
.callReturnLocation
.labelAtOffset(stubInfo
.patch
.deltaCallToDone
));
1155 createJITStubRoutine(
1157 exec
->codeBlock(), patchBuffer
,
1158 ("PutById %stransition stub (%p -> %p) for %s, return point %p",
1159 structure
->outOfLineCapacity() != oldStructure
->outOfLineCapacity() ? "reallocating " : "",
1160 oldStructure
, structure
,
1161 toCString(*exec
->codeBlock()).data(), stubInfo
.callReturnLocation
.labelAtOffset(
1162 stubInfo
.patch
.deltaCallToDone
).executableAddress())),
1164 exec
->codeBlock()->ownerExecutable(),
1165 structure
->outOfLineCapacity() != oldStructure
->outOfLineCapacity(),
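// Tries to cache a put_by_id: generates a transition stub for cacheable new-property puts, patches
// simple replacements in place, and builds setter/custom-setter call stubs when the register state
// allows planting calls (spillMode == DontSpill). Returning GiveUpOnCache makes the caller fall
// back to the generic operation.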
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;
    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return GiveUpOnCache;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return GiveUpOnCache;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            // FIXME: We shouldn't skip this! Implement it!
            // https://bugs.webkit.org/show_bug.cgi?id=130914
            if (oldStructure->couldHaveIndexingHeader())
                return GiveUpOnCache;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.deltaCallToJump),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return RetryCacheLater;
        }
        if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
            return GiveUpOnCache;

        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }
    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;

        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;

            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list;
        list = PolymorphicPutByIdList::from(putKind, stubInfo);

        generateByIdStub(
            exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
            stubRoutine);

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
        RELEASE_ASSERT(!list->isFull());
        return RetryCacheLater;
    }

    return GiveUpOnCache;
}
void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return GiveUpOnCache;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return GiveUpOnCache;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (oldStructure->couldHaveIndexingHeader())
                return GiveUpOnCache;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);

            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);

            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));

        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }
    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;
        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;

            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list;
        list = PolymorphicPutByIdList::from(putKind, stubInfo);

        generateByIdStub(
            exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            CodeLocationLabel(list->currentSlowPathTarget()),
            stubRoutine);

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }
    return GiveUpOnCache;
}
void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
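// Tries to cache an "in" check: builds a stub that validates the receiver's structure and its
// prototype chain, materializes the known boolean answer, and adds the stub to the stub info's
// polymorphic access list.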
1416 static InlineCacheAction
tryRepatchIn(
1417 ExecState
* exec
, JSCell
* base
, const Identifier
& ident
, bool wasFound
,
1418 const PropertySlot
& slot
, StructureStubInfo
& stubInfo
)
1420 if (Options::forceICFailure())
1421 return GiveUpOnCache
;
1423 if (!base
->structure()->propertyAccessesAreCacheable())
1424 return GiveUpOnCache
;
1427 if (!slot
.isCacheable())
1428 return GiveUpOnCache
;
1431 CodeBlock
* codeBlock
= exec
->codeBlock();
1432 VM
* vm
= &exec
->vm();
1433 Structure
* structure
= base
->structure();
1435 PropertyOffset offsetIgnored
;
1436 size_t count
= normalizePrototypeChainForChainAccess(exec
, base
, wasFound
? slot
.slotBase() : JSValue(), ident
, offsetIgnored
);
1437 if (count
== InvalidPrototypeChain
)
1438 return GiveUpOnCache
;
1440 PolymorphicAccessStructureList
* polymorphicStructureList
;
1443 CodeLocationLabel successLabel
= stubInfo
.callReturnLocation
.labelAtOffset(stubInfo
.patch
.deltaCallToDone
);
1444 CodeLocationLabel slowCaseLabel
;
    
    if (stubInfo.accessType == access_unset) {
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initInList(polymorphicStructureList, 0);
        slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
            stubInfo.patch.deltaCallToSlowCase);
        listIndex = 0;
    } else {
        RELEASE_ASSERT(stubInfo.accessType == access_in_list);
        polymorphicStructureList = stubInfo.u.inList.structureList;
        listIndex = stubInfo.u.inList.listSize;
        slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        
        if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
            return GiveUpOnCache;
    }
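    
    // Generate the stub. The code emitted below checks the structure of the base and of each
    // prototype on the chain, then loads the constant boolean result; any failing check jumps
    // to the slow case label chosen above.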
    StructureChain* chain = structure->prototypeChain(exec);
    
    RefPtr<JITStubRoutine> stubRoutine;
    
    {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
        GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
        
        CCallHelpers stubJit(vm);
        
        bool needToRestoreScratch;
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        } else
            needToRestoreScratch = false;
        
        MacroAssembler::JumpList failureCases;
        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
            structure));
        
        CodeBlock* codeBlock = exec->codeBlock();
        if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
            vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
        
        if (slot.watchpointSet())
            slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));
        
        Structure* currStructure = structure;
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = prototype->structure();
            addStructureTransitionCheck(
                prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
                failureCases, scratchGPR);
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
            currStructure = it->get();
        }
        
#if USE(JSVALUE64)
        stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
#else
        stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
#endif
        
        MacroAssembler::Jump success, fail;
        
        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);
        
        LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
        
        linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
        
        stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("In (found = %s) stub for %s, return point %p",
                wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
                successLabel.executableAddress()));
    }
    
    polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
    stubInfo.u.inList.listSize++;
    
    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
    
    return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
}
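
// Public entry point for 'in' caching: if the cache attempt gives up, relink the call site to the
// generic operationIn so we stop trying to cache here.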
void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}
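
// Relink the slow path of a call site to the virtual call thunk for the given specialization kind
// and register preservation mode.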
static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    repatchBuffer.relink(
        callLinkInfo.callReturnLocation,
        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
}
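
// Link a call site to a known callee: record the callee in the CallLinkInfo, patch the hot path to
// jump straight to the callee's entrypoint, and set up the slow path (the closure call link thunk
// for calls, the virtual call thunk for constructs).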
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    
    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        calleeCodeBlock->m_shouldAlwaysBeInlined = false;
    
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);
    
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
        return;
    }
    
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
}
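
// Build a closure call stub: a small code stub that checks the callee's structure and executable,
// stores the callee's scope chain into the frame, and then calls the known target directly; if any
// check fails it falls through to the virtual call thunk. The stub is patched in at the call's
// hot path branch.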
void linkClosureCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
    
    CCallHelpers stubJit(vm, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
    
    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);
    
    if (!ASSERT_DISABLED) {
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }
    
#if USE(JSVALUE64)
    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
    // being set. So we do this the hard way.
    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
    // We would have already checked that the callee is a cell.
#endif
    
    slowPath.append(
        branchStructure(stubJit,
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
            structure));
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));
    
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);
    
#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
#endif
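    
    // Fast path: near-call the known target, then jump back to the caller. The slow path below
    // loads the callee and the CallLinkInfo into the registers that the virtual call thunk
    // expects and jumps to it.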
    AssemblyHelpers::Call call = stubJit.nearCall();
    AssemblyHelpers::Jump done = stubJit.jump();
    
    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);
    
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);
    
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);
    
    callLinkInfo.stub = stubRoutine.release();
    
    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}
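
// The reset functions below return an inline cache to its unoptimized state: the slow path call is
// re-pointed at the *Optimize operation where applicable, the patched structure check is cleared,
// and the IC jump is relinked to the slow case. Presumably used when a code block's inline caches
// must be dropped, e.g. because cached structures may be going away.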
void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}
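
// Resetting a put-by-id must also restore the right slow path operation: the call site may
// currently point at any of the generic or build-list put-by-id variants, so map whichever one is
// installed back to its corresponding Optimize entry point.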
void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif