/*
 * Copyright (C) 2011, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGRepatch.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGScratchRegisterAllocator.h"
#include "DFGSpeculativeJIT.h"
#include "DFGThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "LinkBuffer.h"
#include "Operations.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"
#include "StructureRareDataInlines.h"
#include <wtf/StringPrintStream.h>

namespace JSC { namespace DFG {

static void dfgRepatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchBuffer.relink(call, newCalleeFunction);
}

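// Patch a self (own-property) access in place: relink the slow-path call to
// the given function, overwrite the structure check's immediate, and patch the
// offset used by the inline load or store.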
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, PropertyOffset offset, const FunctionPtr& slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.dfg.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

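// Emit a structure check against |structure|, or elide it entirely when the
// object already has that structure and its transition watchpoint set is still
// valid; in that case we register a watchpoint instead of branching.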
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
#if DFG_ENABLE(JIT_ASSERT)
        // If we execute this code, the object must have the structure we expect. Assert
        // this in debug modes.
        jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
        MacroAssembler::Jump ok = jit.branchPtr(
            MacroAssembler::Equal,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure));
        jit.breakpoint();
        ok.link(&jit);
#endif
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        jit.branchPtr(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureOffset()),
            MacroAssembler::TrustedImmPtr(structure)));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}

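// Redirect the patchable structure check to |target|: replace the branch with
// a jump where the platform supports jump replacement, otherwise relink the
// existing jump.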
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabelPtrAtOffset(
                    -(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.dfg.deltaCallToStructCheck),
        CodeLocationLabel(target));
}

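// Emit the common stub epilogue: if a scratch register was pushed, pop it on
// both the success and failure paths before jumping out.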
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

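// Generate a stub that checks the structure of the base object and of each
// object along the prototype chain, then loads the property from the final
// prototype's inline or out-of-line storage.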
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, PropertyOffset offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    MacroAssembler stubJit;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;

    if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
        stubJit.pushToSave(scratchGPR);
        needToRestoreScratch = true;
    }

    MacroAssembler::JumpList failureCases;

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        addStructureTransitionCheck(
            protoObject, protoObject->structure(), exec->codeBlock(), stubInfo, stubJit,
            failureCases, scratchGPR);
        currStructure = it->get();
    }

    if (isInlineOffset(offset)) {
#if USE(JSVALUE64)
        stubJit.load64(protoObject->locationForOffset(offset), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject->locationForOffset(offset)), resultGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    } else {
        stubJit.loadPtr(protoObject->butterflyAddress(), resultGPR);
#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
        stubJit.load32(MacroAssembler::Address(resultGPR, offsetInButterfly(offset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
    }

    MacroAssembler::Jump success, fail;

    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG prototype chain access stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), successLabel.executableAddress()));
}

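// Try to cache a GetById: array length gets a bespoke stub, self accesses are
// patched inline, and prototype chain accesses get a chain stub. Returns false
// when the access is not cacheable, so the caller can fall back to the generic
// operation.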
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
        bool needToRestoreScratch = false;

        MacroAssembler stubJit;

        if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
#else
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        }

        MacroAssembler::JumpList failureCases;

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSCell::structureOffset()), scratchGPR);
        stubJit.load8(MacroAssembler::Address(scratchGPR, Structure::indexingTypeOffset()), scratchGPR);
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
        failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.or64(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);

        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
            patchBuffer,
            ("DFG GetById array length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.dfg.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);

        return true;
    }

    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (!structure->propertyAccessesAreCacheable())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value)
            || !MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }

    if (structure->isDictionary())
        return false;

    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    ASSERT(slot.slotBase().isObject());

    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);

    RepatchBuffer repatchBuffer(codeBlock);
    replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);

    stubInfo.initGetByIdChain(*vm, codeBlock->ownerExecutable(), structure, prototypeChain, count, true);
    return true;
}

void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

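// Build (or extend) a polymorphic list of self-access stubs, including stubs
// that call getters and custom accessors. Returns true while the list still
// has room, which keeps the slow-path call pointing at the list builder.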
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;

    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    VM* vm = &exec->vm();

    ASSERT(slot.slotBase().isObject());

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), JITStubRoutine::createSelfManagedRoutine(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;

        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();

        CCallHelpers stubJit(vm, codeBlock);

        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));

        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.

        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;

        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(scratchGPR != InvalidGPRReg);
                ASSERT(baseGPR != scratchGPR);
                if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                } else {
                    stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
#if USE(JSVALUE64)
                    stubJit.load64(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#else
                    stubJit.load32(MacroAssembler::Address(scratchGPR, offsetRelativeToBase(slot.cachedOffset())), scratchGPR);
#endif
                }
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }

            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            if (isInlineOffset(slot.cachedOffset())) {
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                if (baseGPR == resultTagGPR) {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                } else {
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                    stubJit.load32(MacroAssembler::Address(baseGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
                }
#endif
            } else {
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
#if USE(JSVALUE64)
                stubJit.load64(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset())), resultGPR);
#else
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
                stubJit.load32(MacroAssembler::Address(resultGPR, offsetRelativeToBase(slot.cachedOffset()) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            }
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*vm, &stubJit, codeBlock);

        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);

        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }

        RefPtr<JITStubRoutine> stubRoutine =
            createJITStubRoutine(
                FINALIZE_DFG_CODE(
                    patchBuffer,
                    ("DFG GetById polymorphic list access for %s, return point %p",
                        toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                            stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
                *vm,
                codeBlock->ownerExecutable(),
                slot.cachedPropertyType() == PropertySlot::Getter
                || slot.cachedPropertyType() == PropertySlot::Custom);

        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.dfg.deltaCallToStructCheck),
            CodeLocationLabel(stubRoutine->code().code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}

void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

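// Build (or extend) a polymorphic list of prototype chain access stubs,
// seeding the list from an existing chain access stub if there is one.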
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    ASSERT(slot.slotBase().isObject());

    PropertyOffset offset = slot.cachedOffset();
    size_t count = normalizePrototypeChainForChainAccess(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (count == InvalidPrototypeChain)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;

    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*vm, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine.clear();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;

        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());
        ASSERT(!!lastProtoBegin);

        RefPtr<JITStubRoutine> stubRoutine;

        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);

        polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}

void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

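// Pick the generic PutById slow-path operation matching the slot's strict mode
// and the put kind (direct vs. non-direct).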
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

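// Generate a stub that replaces an existing property: check the structure,
// emit a write barrier when profiling requires one, then store the value to
// inline or out-of-line storage.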
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = RegisterSet(stubInfo.patch.dfg.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = false;
#if ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif

    MacroAssembler stubJit;

    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || isOutOfLineOffset(slot.cachedOffset()))) {
#if USE(JSVALUE64)
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
#else
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR);
#endif
        needToRestoreScratch = true;
        stubJit.pushToSave(scratchGPR);
    }

    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));

#if ENABLE(WRITE_BARRIER_PROFILING)
#if USE(JSVALUE64)
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
#else
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, valueTagGPR, scratchGPR);
#endif
    stubJit.pushToSave(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.popToRestore(scratchGPR2);
#endif

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        stubJit.popToRestore(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = FINALIZE_CODE_FOR_DFG_STUB(
        patchBuffer,
        ("DFG PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.dfg.deltaCallToDone).executableAddress()));
}

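// Generate a stub for a put that transitions the structure. Checks the old
// structure (and, for non-direct puts, the prototype chain); when out-of-line
// capacity grows, allocates new storage inline from the copied-space
// allocator, copying any existing properties, with a slow path that calls out
// to operationReallocateStorageAndFinishPut if inline allocation fails.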
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.dfg.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    CCallHelpers stubJit(vm);

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);

    bool needSecondScratch = false;
    bool needThirdScratch = false;
#if ENABLE(WRITE_BARRIER_PROFILING)
    needSecondScratch = true;
#endif
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needSecondScratch = true;
        needThirdScratch = true;
    }

    GPRReg scratchGPR2;
    if (needSecondScratch) {
        scratchGPR2 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR2 != baseGPR);
        ASSERT(scratchGPR2 != valueGPR);
        ASSERT(scratchGPR2 != scratchGPR1);
    } else
        scratchGPR2 = InvalidGPRReg;
    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;

    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));

    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

#if ENABLE(WRITE_BARRIER_PROFILING)
    ASSERT(needSecondScratch);
    ASSERT(scratchGPR2 != InvalidGPRReg);
    // Must always emit this write barrier as the structure transition itself requires it
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR1, scratchGPR2, WriteBarrierForPropertyAccess);
#endif

    MacroAssembler::JumpList slowPath;

    bool scratchGPR1HasStorage = false;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();

        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (ptrdiff_t offset = 0; offset < static_cast<ptrdiff_t>(oldSize); offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }

        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);

        allocator.restoreReusedRegistersByPopping(stubJit);
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSize());
        allocator.preserveUsedRegistersToScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBuffer(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }

    LinkBuffer patchBuffer(*vm, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    }

    stubRoutine =
        createJITStubRoutine(
            FINALIZE_DFG_CODE(
                patchBuffer,
                ("DFG PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.dfg.deltaCallToDone).executableAddress())),
            *vm,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);
}

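// Try to cache a PutById self access: new properties get a transition stub,
// replaces are patched inline. Returns false when the access is not cacheable.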
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.dfg.deltaCallToStructCheck),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return true;
        }

        if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
            return false;

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
        return true;
    }

    return false;
}

void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

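// Build (or extend) a polymorphic list of PutById stubs, mixing transition and
// replace cases. Once the list is full, the slow-path call is relinked to the
// generic operation.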
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return false;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            if (hasIndexingHeader(oldStructure->indexingType()))
                return false;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return false;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);

            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);

            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine->code().code()));

        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return true;
    }

    return false;
}

void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

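// Point the call's slow path at the appropriate virtual call or virtual
// construct thunk.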
1121 | static void linkSlowFor(RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind) | |
1122 | { | |
1123 | if (kind == CodeForCall) { | |
1124 | repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualCallThunkGenerator).code()); | |
1125 | return; | |
1126 | } | |
1127 | ASSERT(kind == CodeForConstruct); | |
1128 | repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(virtualConstructThunkGenerator).code()); | |
1129 | } | |
1130 | ||
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);
    
    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);
    
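    // Calls get the closure-call linking thunk as their new slow path, so the
    // site can still be upgraded to a closure call stub if the callee turns
    // out to vary; constructs fall through to the plain virtual thunk.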
    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGenerator).code());
        return;
    }
    
    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct);
}

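// Give up on linking this call site to a specific callee and fall back to
// the appropriate virtual call/construct thunk.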
void dfgLinkSlowFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind);
}

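// Build a closure call stub: instead of pinning the call site to one callee,
// check that the callee has the expected structure and executable, so that
// any closure over the same function body can take the fast path. The stub's
// shape is roughly:
//
//     if (callee is not a cell) goto slow;
//     if (callee->structure != structure) goto slow;
//     if (callee->executable != executable) goto slow;
//     frame.scopeChain = callee->scopeChain;
//     call codePtr;
//     return to the original return point;
// slow:
//     tail-call the virtual call thunk.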
void dfgLinkClosureCall(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub);
    
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();
    
    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);
    
    CCallHelpers stubJit(vm, callerCodeBlock);
    
    CCallHelpers::JumpList slowPath;
    
#if USE(JSVALUE64)
    slowPath.append(
        stubJit.branchTest64(
            CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
    // We would have already checked that the callee is a cell.
#endif
    
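    // The stub is keyed on (structure, executable) rather than on callee
    // identity; these two checks are what let distinct closures share it.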
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureOffset()),
            CCallHelpers::TrustedImmPtr(structure)));
    
    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));
    
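    // Different closures carry different scope chains, so load the scope
    // chain from the actual callee and store it into the frame rather than
    // baking one into the stub.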
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);
    
#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain)));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
#endif
    
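    // Fast path: call straight into the known target, then jump back to the
    // instruction after the original call.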
    JITCompiler::Call call = stubJit.nearCall();
    JITCompiler::Jump done = stubJit.jump();
    
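    // Slow path: hand the callee and the return address over to the virtual
    // call thunk in the registers it expects.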
    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::nonArgGPR0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::nonArgGPR1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::nonArgGPR2);
    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::nonArgGPR2);
    JITCompiler::Jump slow = stubJit.jump();
    
    LinkBuffer patchBuffer(*vm, &stubJit, callerCodeBlock);
    
    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualCallThunkGenerator).code()));
    
    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_DFG_CODE(
            patchBuffer,
            ("DFG closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));
    
    RepatchBuffer repatchBuffer(callerCodeBlock);
    
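    // Patch the hot path's branch to jump directly into the stub, and point
    // the slow path at the virtual call thunk.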
    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall);
    
    callLinkInfo.stub = stubRoutine.release();
    
    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}

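// Return a get_by_id site to its unoptimized state: relink the slow call to
// the optimizing operation, revert the patched structure check and load
// offsets to their unset values, and point the structure-check jump back at
// the slow case.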
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

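// Same as dfgResetGetByID, but for put_by_id sites. The current call target
// tells us which flavor (strict/non-strict, direct or not) the site was
// compiled with, so pick the matching optimizing operation before reverting
// the patches.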
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabelPtr structureLabel = stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranchPtrWithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranchPtrWithPatch(
            RepatchBuffer::startOfPatchableBranchPtrWithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.dfg.baseGPR),
                JSCell::structureOffset()),
            reinterpret_cast<void*>(unusedPointer));
    }
    repatchBuffer.repatch(structureLabel, reinterpret_cast<void*>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)