/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "DFGRepatch.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGScratchRegisterAllocator.h"
#include "DFGSpeculativeJIT.h"
#include "DFGThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "LinkBuffer.h"
#include "Operations.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"
#include "StructureRareDataInlines.h"
#include <wtf/StringPrintStream.h>
namespace JSC { namespace DFG {
static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relink(call, newCalleeFunction);
}
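
// Patch a self (own-property) access directly into the main-path code: retarget the
// slow-path call, rewrite the inline structure check immediate, and rewrite the
// patchable load/store offset to point at the newly cached property.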
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr &slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}
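
// Emit a check that the object still has the expected structure. If the structure's
// transition watchpoint set is still valid, register a watchpoint instead and emit no
// main-path code at all (just a debug-only assertion).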
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
#if DFG_ENABLE(JIT_ASSERT)
        // If we execute this code, the object must have the structure we expect. Assert
        // this in debug modes.
        jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
        MacroAssembler::Jump ok = jit.branchPtr(
            MacroAssembler::Equal,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure));
        jit.breakpoint();
        ok.link(&jit);
#endif
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        jit.branchPtr(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure)));
}
static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}
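
// Redirect the inline structure check to jump straight to a generated stub: either by
// replacing the patchable branch itself or, if the platform cannot do that, by
// relinking the out-of-line jump that follows the structure check.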
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabelPtrAtOffset(
                    -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.dfg.deltaCallToStructCheck),
        CodeLocationLabel(target));
}
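
// Helpers for stubs that had to spill a scratch register: emit the epilogue that pops
// the scratch on both the success and failure paths, then link the resulting jumps to
// the done label and the slow-case label respectively.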
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
    } else
        success = stubJit.jump();
}
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
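
// Generate a stub that validates the structure of the base object and of every object
// on the prototype chain, then loads the property from the prototype that actually
// holds it.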
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    MacroAssembler stubJit;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;

    if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
        stubJit.pushToSave(scratchGPR);
        needToRestoreScratch = true;
    }

    MacroAssembler::JumpList failureCases;

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        addStructureTransitionCheck(
            protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
            failureCases, scratchGPR);
        currStructure = it->get();
    }

    if (isInlineOffset(offset)) {
#if USE(JSVALUE64)
        stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }

    MacroAssembler::Jump success, fail;

    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG prototype chain access stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
}
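
// Attempt to cache a GetById: array length accesses get a dedicated stub, own
// properties are patched inline via dfgRepatchByIdSelfAccess, and plain-value
// prototype properties get a prototype chain access stub. Returns false if the access
// is not cacheable and the generic operation should remain in place.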
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        bool needToRestoreScratch = false;

        MacroAssembler stubJit;

        if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        }

        MacroAssembler::JumpList failureCases;

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
        stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);

        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
            patchBuffer,
            ("DFG GetById array length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.dfg.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);

        return true;
    }

    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value)
            || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }

    if (structure->isDictionary())
        return false;

    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    ASSERT(slot.slotBase().isObject());

    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);

    RepatchBuffer repatchBuffer(codeBlock);
    replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);

    stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
}
void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
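
// Build (or extend) a polymorphic list of self-access stubs, including getter and
// custom-getter cases, which require planting a call to a DFG helper and an exception
// check inside the stub.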
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;

    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    VM* vm = &exec->vm();

    ASSERT(slot.slotBase().isObject());

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;

        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();

        CCallHelpers stubJit(vm, codeBlock);

        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));

        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.

        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;

        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(scratchGPR != InvalidGPRReg);
                ASSERT(baseGPR != scratchGPR);
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }

            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                if (baseGPR == resultTagGPR) {
                    // Load the payload first so the tag load doesn't clobber the base register.
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);

        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);

        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }

        RefPtr<JITStubRoutine> stubRoutine =
            createJITStubRoutine(
                FINALIZE_DFG_CODE(
                    patchBuffer,
                    ("DFG GetById polymorphic list access for %s, return point %p",
                        toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                            stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
                *vm,
                codeBlock->ownerExecutable(),
                slot.cachedPropertyType() == PropertySlot::Getter
                || slot.cachedPropertyType() == PropertySlot::Custom);

        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.dfg.deltaCallToStructCheck),
            CodeLocationLabel(stubRoutine->code().code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}
void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
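
// Build (or extend) a polymorphic list of prototype chain access stubs, starting from
// an existing get_by_id_chain cache.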
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    ASSERT(slot.slotBase().isObject());

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;

    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine.clear();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;

        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        ASSERT(!!lastProtoBegin);

        RefPtr<JITStubRoutine> stubRoutine;

        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);

        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}
void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
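
// Pick the slow-path operation matching the put's strictness and directness; the
// "BuildList" variants additionally try to grow a polymorphic stub list.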
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}
static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}
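
// Generate a stub that replaces the value of an existing property: check the
// structure, optionally emit a write barrier, then store either into inline storage
// or into the butterfly (out-of-line storage).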
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;
#if ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif

    MacroAssembler stubJit;

    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
#endif
        needToRestoreScratch = true;
        stubJit.pushToSave(scratchGPR);
    }

    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));

#if ENABLE(WRITE_BARRIER_PROFILING)
#if USE(JSVALUE64)
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
#else
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
#endif
    stubJit.pushToSave(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.popToRestore(scratchGPR2);
#endif

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        stubJit.popToRestore(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}
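
// Generate a stub for a put that transitions the structure: check the old structure
// and the prototype chain, reallocate out-of-line storage inline if the capacity
// changed (falling back to a C++ helper on allocation failure), store the new
// structure pointer, then store the value.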
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    CCallHelpers stubJit(vm);

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);

    bool needSecondScratch = false;
    bool needThirdScratch = false;
#if ENABLE(WRITE_BARRIER_PROFILING)
    needSecondScratch = true;
#endif
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needSecondScratch = true;
        needThirdScratch = true;
    }

    GPRReg scratchGPR2;
    if (needSecondScratch) {
        scratchGPR2 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR2 != baseGPR);
        ASSERT(scratchGPR2 != valueGPR);
        ASSERT(scratchGPR2 != scratchGPR1);
    } else
        scratchGPR2 = InvalidGPRReg;
    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;

    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));

    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

#if ENABLE(WRITE_BARRIER_PROFILING)
    ASSERT(needSecondScratch);
    ASSERT(scratchGPR2 != InvalidGPRReg);
    // Must always emit this write barrier as the structure transition itself requires it
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
#endif

    MacroAssembler::JumpList slowPath;

    bool scratchGPR1HasStorage = false;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();

        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }

        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);

        allocator.restoreReusedRegistersByPopping(stubJit);
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSize());
        allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    }

    stubRoutine =
        createJITStubRoutine(
            FINALIZE_DFG_CODE(
                patchBuffer,
                ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
            *vm,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);
}
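
// Attempt to cache a PutById: new properties get a transition stub, existing
// properties get their inline access patched in place. Returns false if the put is
// not cacheable.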
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.dfg.deltaCallToStructCheck),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return true;
        }

        if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
            return false;

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }

    return false;
}
void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
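
// Build (or extend) a polymorphic list of put stubs, mixing transition and replace
// cases for the same access site.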
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);

            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);

            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));

        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return true;
    }

    return false;
}
void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
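
// Point the call's slow path at the generic virtual call (or construct) thunk.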
static void linkSlowFor(RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualCallThunkGenerator).code());
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualConstructThunkGenerator).code());
}
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    ASSERT(!callLinkInfo.stub);

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);

    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGenerator).code());
        return;
    }

    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct);
}
void dfgLinkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind);
}
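
// Generate a closure call stub: check that the callee is a cell with the expected
// structure and executable, plant the scope chain into the call frame, and call the
// known code pointer directly; anything else falls through to the virtual call thunk.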
void dfgLinkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub);

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

#if USE(JSVALUE64)
    slowPath.append(
        stubJit.branchTest64(
            CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
    // We would have already checked that the callee is a cell.
#endif

    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
            CCallHelpers::TrustedImmPtr(structure)));

    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));

    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);

#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
#endif

    JITCompiler::Call call = stubJit.nearCall();
    JITCompiler::Jump done = stubJit.jump();

    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
    JITCompiler::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualCallThunkGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_DFG_CODE(
            patchBuffer,
            ("DFG closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));

    RepatchBuffer repatchBuffer(callerCodeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall);

    callLinkInfo.stub = stubRoutine.release();

    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}
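
// Reset a GetById access site back to its unoptimized state so it can be repatched
// from scratch: restore the slow-path call, the patchable structure check, and the
// load offsets.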
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
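
// Reset a PutById access site, choosing the Optimize variant that corresponds to the
// currently linked slow-path operation.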
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}
} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)