/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGRepatch.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGSpeculativeJIT.h"
#include "LinkBuffer.h"
#include "Operations.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"

namespace JSC { namespace DFG {

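// This file implements the DFG's inline cache repatching: the functions below
// rewrite already-compiled DFG code in place to install, grow, and reset
// caches for property accesses and calls. Each patchable access carries a
// StructureStubInfo that records, as deltas from the slow-path call's return
// address, where the structure-check immediate, the load/store offset labels,
// and the structure-check jump live.
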
static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relink(call, newCalleeFunction);
}

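// Self-access caching does not need a stub at all: the fast path that the DFG
// already emitted is repatched in place, pointing its structure check at the
// cached Structure and its load/store displacement at the cached offset.
// 'compact' selects between the compact and 32-bit offset label flavors that
// the DFG may have emitted.
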
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, size_t offset, const FunctionPtr& slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

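// Stubs sometimes need a scratch register beyond what the compiled code left
// available. In that case the scratch is pushed at stub entry, so both the
// success and failure exits must pop it before jumping back; these two helpers
// factor out the emission and the linking halves of that protocol.
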
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.pop(scratchGPR);

        fail = stubJit.jump();
    } else
        success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

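// A prototype-chain access stub checks the base object's structure, then the
// structure of every object along the chain (each prototype is burned in as a
// constant), and finally loads the property from the last prototype's storage.
// Any failed check jumps to the slow case.
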
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, size_t offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    MacroAssembler stubJit;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;

    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }

    MacroAssembler::JumpList failureCases;

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(protoObject->structure())));
        currStructure = it->get();
    }

    stubJit.loadPtr(protoObject->addressOfPropertyStorage(), resultGPR);
#if USE(JSVALUE64)
    stubJit.loadPtr(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
    stubJit.load32(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
    stubJit.load32(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif

    MacroAssembler::Jump success, fail;

    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

    stubRoutine = patchBuffer.finalizeCode();
}

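// The GetById cache is installed in tiers: a special case for JSArray.length,
// in-place self-access patching, and a generated stub for prototype-chain
// accesses. A false return tells the caller that nothing was cached, so the
// slow-path call should be relinked to the fully generic operation.
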
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
        bool needToRestoreScratch = false;

        MacroAssembler stubJit;

        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }

        MacroAssembler::JumpList failureCases;

        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSArray::storageOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, OBJECT_OFFSETOF(ArrayStorage, m_length)), scratchGPR);
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.orPtr(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);

        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

        stubInfo.stubRoutine = patchBuffer.finalizeCode();

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);

        return true;
    }

    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value) || ((slot.cachedOffset() * sizeof(JSValue)) > (unsigned)MacroAssembler::MaximumCompactPtrAlignedAddressOffset)) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    if (structure->isDictionary())
        return false;

    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    size_t offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    ASSERT(slot.slotBase().isObject());

    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);

    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);

    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain);
    return true;
}

void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

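// After the monomorphic self cache misses, further self accesses accumulate in
// a PolymorphicAccessStructureList: one stub per structure, each stub's failure
// path chaining to the previously generated stub. Getter and custom-property
// accesses plant real calls, which is why they require the registers to have
// been flushed before this operation.
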
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;

    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    JSGlobalData* globalData = &exec->globalData();

    ASSERT(slot.slotBase().isObject());

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), MacroAssemblerCodeRef::createSelfManagedCodeRef(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;

        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);

        CCallHelpers stubJit(globalData, codeBlock);

        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));

        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.

        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;

        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(baseGPR != scratchGPR);
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
#if USE(JSVALUE64)
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)), scratchGPR);
#elif USE(JSVALUE32_64)
                stubJit.load32(MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), scratchGPR);
#endif
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }

            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(RegisterFile::ArgumentCount)));

            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif

            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), resultGPR);
#if USE(JSVALUE64)
            stubJit.loadPtr(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue)), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.load32(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
            stubJit.load32(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);

        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);

        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }

        MacroAssemblerCodeRef stubRoutine = patchBuffer.finalizeCode();

        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}

void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

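// The proto-list variant handles polymorphic prototype-chain accesses. The
// existing monomorphic chain stub, if any, seeds the list; each new structure
// gets a fresh proto-chain stub whose failure path falls back to the previous
// stub in the list.
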
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    ASSERT(slot.slotBase().isObject());

    size_t offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine = MacroAssemblerCodeRef();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;

        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
        ASSERT(!!lastProtoBegin);

        MacroAssemblerCodeRef stubRoutine;

        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);

        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);

        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}

void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

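// PutById slow-path operations form a strict/non-strict by direct/non-direct
// matrix, each in a fully generic flavor and a flavor that keeps trying to
// build a polymorphic list. These two helpers pick the right operation for the
// slot's mode and the put kind.
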
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

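// Emits the structure check for one prototype. The prototype pointer itself is
// a compile-time constant, so only its structure needs re-checking at run
// time; a null prototype terminates the chain and needs no check.
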
static void testPrototype(MacroAssembler &stubJit, GPRReg scratchGPR, JSValue prototype, MacroAssembler::JumpList& failureCases)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    stubJit.move(MacroAssembler::TrustedImmPtr(prototype.asCell()), scratchGPR);
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(prototype.asCell()->structure())));
}

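// A replace stub overwrites an existing property slot, in inline or
// out-of-line storage, after checking the base object's structure. When GGC or
// write-barrier profiling is enabled, a write barrier on the base object is
// also required, which is why a scratch register may be needed even for inline
// storage.
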
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif

    MacroAssembler stubJit;

    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || !structure->isUsingInlineStorage())) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        needToRestoreScratch = true;
        stubJit.push(scratchGPR);
    }

    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratchGPR2);
#endif

#if USE(JSVALUE64)
    if (structure->isUsingInlineStorage())
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (structure->isUsingInlineStorage()) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = patchBuffer.finalizeCode();
}

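// A transition stub handles puts that add a property without reallocating
// property storage: it checks the old structure, re-validates the prototype
// chain (the whole chain for non-direct puts), emits the write barrier that
// the structure transition itself requires, stores the new structure pointer,
// and then stores the value.
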
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;

    ASSERT(scratchGPR != baseGPR);

    MacroAssembler stubJit;

    MacroAssembler::JumpList failureCases;

    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));

    testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
            testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
    }

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    // Must always emit this write barrier as the structure transition itself requires it
    GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratch2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratch2);
#endif

    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (structure->isUsingInlineStorage())
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (structure->isUsingInlineStorage()) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (needToRestoreScratch)
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);

    stubRoutine = patchBuffer.finalizeCode();
}

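// Monomorphic PutById caching: property-adding transitions that need no
// storage reallocation get a transition stub, while simple replaces are
// handled by patching the compiled fast path in place, just as with GetById
// self accesses.
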
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // skip optimizing the case where we need a realloc
            if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity())
                return false;

            normalizePrototypeChain(exec, baseCell);

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    return false;
}

void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        MacroAssemblerCodeRef stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // skip optimizing the case where we need a realloc
            if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity())
                return false;

            normalizePrototypeChain(exec, baseCell);

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);

            list->addAccess(
                PutByIdAccess::transition(
                    *globalData, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);

            list->addAccess(
                PutByIdAccess::replace(
                    *globalData, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine.code()));

        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return true;
    }

    return false;
}

void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

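// Links a call site once its callee is known: the callee is recorded in the
// CallLinkInfo, the hot path is relinked to jump straight to the callee's
// code, and the slow path is pointed at the virtual call (or construct)
// operation so the site still works if a different callee shows up later.
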
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);

    if (kind == CodeForCall) {
        repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualCall);
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
}

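// The reset functions return a patched access to its virgin state: the
// slow-path call goes back to the optimizing operation, the structure-check
// immediate is poisoned with -1, the offset labels are zeroed, and the
// structure-check jump is aimed straight at the slow case.
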
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)