/*
 * Copyright (C) 2011 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "DFGRepatch.h"

#if ENABLE(DFG_JIT)

#include "DFGCCallHelpers.h"
#include "DFGSpeculativeJIT.h"
#include "LinkBuffer.h"
#include "Operations.h"
#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"

namespace JSC { namespace DFG {

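// Repoint an already-linked call instruction at a new target function.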
static void dfgRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(call, newCalleeFunction);
}

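// Cache a monomorphic self access: repoint the slow-path call, then patch the
// inline structure check and the load/store displacement emitted by the DFG.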
static void dfgRepatchByIdSelfAccess(CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, size_t offset, const FunctionPtr& slowPathFunction, bool compact)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchBuffer.relink(stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(intptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), structure);
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), sizeof(JSValue) * offset);
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), sizeof(JSValue) * offset + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

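// Emit the shared stub epilogue. When a scratch register was pushed on entry,
// it must be popped on both the success path and the failure path before
// jumping back into the main code.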
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);

        success = stubJit.jump();

        // link failure cases here, so we can pop scratchGPR, and then jump back.
        failureCases.link(&stubJit);

        stubJit.pop(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}

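// Wire the success and failure exits produced by emitRestoreScratch() to the
// done label and the slow-case label of the patched access.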
static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // link failure cases directly back to normal path
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

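// Emit a stub that checks the base cell's structure, walks the prototype
// chain verifying each prototype's structure, and then loads the property
// from the final prototype's storage.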
static void generateProtoChainAccessStub(ExecState* exec, StructureStubInfo& stubInfo, StructureChain* chain, size_t count, size_t offset, Structure* structure, CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    MacroAssembler stubJit;

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;

    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }

    MacroAssembler::JumpList failureCases;

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure)));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(exec));
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(protoObject->structure())));
        currStructure = it->get();
    }

    stubJit.loadPtr(protoObject->addressOfPropertyStorage(), resultGPR);
#if USE(JSVALUE64)
    stubJit.loadPtr(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>)), resultGPR);
#elif USE(JSVALUE32_64)
    stubJit.load32(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
    stubJit.load32(MacroAssembler::Address(resultGPR, offset * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif

    MacroAssembler::Jump success, fail;

    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

    stubRoutine = patchBuffer.finalizeCode();
}

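// Try to cache a get_by_id as an array length access, a self access, or a
// prototype chain access. Returns false if the access cannot be cached, in
// which case the caller relinks the call to the generic operation.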
static bool tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (isJSArray(baseValue) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
        bool needToRestoreScratch = false;

        MacroAssembler stubJit;

        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, resultGPR);
            stubJit.push(scratchGPR);
            needToRestoreScratch = true;
        }

        MacroAssembler::JumpList failureCases;

        failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::classInfoOffset()), MacroAssembler::TrustedImmPtr(&JSArray::s_info)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSArray::storageOffset()), scratchGPR);
        stubJit.load32(MacroAssembler::Address(scratchGPR, OBJECT_OFFSETOF(ArrayStorage, m_length)), scratchGPR);
        failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

#if USE(JSVALUE64)
        stubJit.orPtr(GPRInfo::tagTypeNumberRegister, scratchGPR, resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(scratchGPR, resultGPR);
        stubJit.move(JITCompiler::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);

        linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

        stubInfo.stubRoutine = patchBuffer.finalizeCode();

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
        repatchBuffer.relink(stubInfo.callReturnLocation, operationGetById);

        return true;
    }

    // FIXME: should support length access for String.

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary() || structure->typeInfo().prohibitsPropertyCaching())
        return false;

    // Optimize self access.
    if (slot.slotBase() == baseValue) {
        if ((slot.cachedPropertyType() != PropertySlot::Value) || ((slot.cachedOffset() * sizeof(JSValue)) > (unsigned)MacroAssembler::MaximumCompactPtrAlignedAddressOffset)) {
            dfgRepatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    if (structure->isDictionary())
        return false;

    // FIXME: optimize getters and setters
    if (slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    size_t offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    StructureChain* prototypeChain = structure->prototypeChain(exec);

    ASSERT(slot.slotBase().isObject());

    generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase), stubInfo.stubRoutine);

    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdProtoBuildList);

    stubInfo.initGetByIdChain(*globalData, codeBlock->ownerExecutable(), structure, prototypeChain);
    return true;
}

void dfgRepatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool cached = tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

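// Grow a polymorphic list of self-access stubs for this get_by_id, including
// stubs that call out to getters and custom getters. Returns false when the
// access is uncacheable or the list has reached capacity, in which case the
// caller relinks to the fully generic operation.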
static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isUncacheableDictionary()
        || slot.slotBase() != baseValue)
        return false;

    if (!stubInfo.patch.dfg.registersFlushed) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (slot.cachedPropertyType() != PropertySlot::Value)
            return false;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    JSGlobalData* globalData = &exec->globalData();

    ASSERT(slot.slotBase().isObject());

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    if (stubInfo.accessType == access_unset) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 0);
        listIndex = 0;
    } else if (stubInfo.accessType == access_get_by_id_self) {
        ASSERT(!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), MacroAssemblerCodeRef::createSelfManagedCodeRef(stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase)), stubInfo.u.getByIdSelf.baseObjectStructure.get(), true);
        stubInfo.initGetByIdSelfList(polymorphicStructureList, 1);
        listIndex = 1;
    } else {
        polymorphicStructureList = stubInfo.u.getByIdSelfList.structureList;
        listIndex = stubInfo.u.getByIdSelfList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdSelfList.listSize++;

        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
        GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);

        CCallHelpers stubJit(globalData, codeBlock);

        MacroAssembler::Jump wrongStruct = stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(structure));

        // The strategy we use for stubs is as follows:
        // 1) Call DFG helper that calls the getter.
        // 2) Check if there was an exception, and if there was, call yet another
        //    helper.

        bool isDirect = false;
        MacroAssembler::Call operationCall;
        MacroAssembler::Call handlerCall;
        FunctionPtr operationFunction;
        MacroAssembler::Jump success;

        if (slot.cachedPropertyType() == PropertySlot::Getter
            || slot.cachedPropertyType() == PropertySlot::Custom) {
            if (slot.cachedPropertyType() == PropertySlot::Getter) {
                ASSERT(baseGPR != scratchGPR);
                stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
#if USE(JSVALUE64)
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)), scratchGPR);
#elif USE(JSVALUE32_64)
                stubJit.load32(MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), scratchGPR);
#endif
                stubJit.setupArgumentsWithExecState(baseGPR, scratchGPR);
                operationFunction = operationCallGetter;
            } else {
                stubJit.setupArgumentsWithExecState(
                    baseGPR,
                    MacroAssembler::TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()),
                    MacroAssembler::TrustedImmPtr(const_cast<Identifier*>(&ident)));
                operationFunction = operationCallCustomGetter;
            }

            // Need to make sure that whenever this call is made in the future, we remember the
            // place that we made it from. It just so happens to be the place that we are at
            // right now!
            stubJit.store32(
                MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
                CCallHelpers::tagFor(static_cast<VirtualRegister>(RegisterFile::ArgumentCount)));

            operationCall = stubJit.call();
#if USE(JSVALUE64)
            stubJit.move(GPRInfo::returnValueGPR, resultGPR);
#else
            stubJit.setupResults(resultGPR, resultTagGPR);
#endif
            success = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArgumentsWithExecState(
                MacroAssembler::TrustedImmPtr(&stubInfo));
            handlerCall = stubJit.call();
            stubJit.jump(GPRInfo::returnValueGPR2);
        } else {
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), resultGPR);
#if USE(JSVALUE64)
            stubJit.loadPtr(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue)), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.load32(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)), resultTagGPR);
            stubJit.load32(MacroAssembler::Address(resultGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)), resultGPR);
#endif
            success = stubJit.jump();
            isDirect = true;
        }

        LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);

        CodeLocationLabel lastProtoBegin;
        if (listIndex)
            lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
        else
            lastProtoBegin = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase);
        ASSERT(!!lastProtoBegin);

        patchBuffer.link(wrongStruct, lastProtoBegin);
        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
        if (!isDirect) {
            patchBuffer.link(operationCall, operationFunction);
            patchBuffer.link(handlerCall, lookupExceptionHandlerInStub);
        }

        MacroAssemblerCodeRef stubRoutine = patchBuffer.finalizeCode();

        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}

void dfgBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

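// Grow a polymorphic list of prototype-chain access stubs for a get_by_id
// whose slot base is a prototype rather than the base object itself.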
static bool tryBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable()
        || baseValue.asCell()->structure()->isDictionary()
        || baseValue.asCell()->structure()->typeInfo().prohibitsPropertyCaching()
        || slot.slotBase() == baseValue
        || slot.cachedPropertyType() != PropertySlot::Value)
        return false;

    ASSERT(slot.slotBase().isObject());

    size_t offset = slot.cachedOffset();
    size_t count = normalizePrototypeChain(exec, baseValue, slot.slotBase(), propertyName, offset);
    if (!count)
        return false;

    Structure* structure = baseValue.asCell()->structure();
    StructureChain* prototypeChain = structure->prototypeChain(exec);
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex = 1;

    if (stubInfo.accessType == access_get_by_id_chain) {
        ASSERT(!!stubInfo.stubRoutine);
        polymorphicStructureList = new PolymorphicAccessStructureList(*globalData, codeBlock->ownerExecutable(), stubInfo.stubRoutine, stubInfo.u.getByIdChain.baseObjectStructure.get(), stubInfo.u.getByIdChain.chain.get(), true);
        stubInfo.stubRoutine = MacroAssemblerCodeRef();
        stubInfo.initGetByIdProtoList(polymorphicStructureList, 1);
    } else {
        ASSERT(stubInfo.accessType == access_get_by_id_proto_list);
        polymorphicStructureList = stubInfo.u.getByIdProtoList.structureList;
        listIndex = stubInfo.u.getByIdProtoList.listSize;
    }

    if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
        stubInfo.u.getByIdProtoList.listSize++;

        CodeLocationLabel lastProtoBegin = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine.code());
        ASSERT(!!lastProtoBegin);

        MacroAssemblerCodeRef stubRoutine;

        generateProtoChainAccessStub(exec, stubInfo, prototypeChain, count, offset, structure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone), lastProtoBegin, stubRoutine);

        polymorphicStructureList->list[listIndex].set(*globalData, codeBlock->ownerExecutable(), stubRoutine, structure, true);

        CodeLocationJump jumpLocation = stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck);
        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine.code()));

        if (listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1))
            return true;
    }

    return false;
}

void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    bool dontChangeCall = tryBuildGetByIDProtoList(exec, baseValue, propertyName, slot, stubInfo);
    if (!dontChangeCall)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

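// Pick the fully generic put_by_id slow path matching the slot's strictness
// and the put kind (direct vs. normal).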
static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

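// Emit a structure check against one prototype in the chain. A null
// prototype terminates the chain and needs no check.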
static void testPrototype(MacroAssembler& stubJit, GPRReg scratchGPR, JSValue prototype, MacroAssembler::JumpList& failureCases)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    stubJit.move(MacroAssembler::TrustedImmPtr(prototype.asCell()), scratchGPR);
    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(prototype.asCell()->structure())));
}

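// Emit a stub that overwrites an existing property: check the structure,
// emit a write barrier if the build configuration requires one, then store
// to inline or out-of-line property storage.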
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;
#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    GPRReg scratchGPR2;
    const bool writeBarrierNeeded = true;
#else
    const bool writeBarrierNeeded = false;
#endif

    MacroAssembler stubJit;

    if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || !structure->isUsingInlineStorage())) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        needToRestoreScratch = true;
        stubJit.push(scratchGPR);
    }

    MacroAssembler::Jump badStructure = stubJit.branchPtr(
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
        MacroAssembler::TrustedImmPtr(structure));

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratchGPR2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratchGPR2);
#endif

#if USE(JSVALUE64)
    if (structure->isUsingInlineStorage())
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (structure->isUsingInlineStorage()) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = patchBuffer.finalizeCode();
}

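// Emit a stub for a put that adds a property without reallocating storage:
// check the old structure (and, for non-direct puts, every structure on the
// prototype chain), transition the cell to the new structure, then store.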
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    MacroAssemblerCodeRef& stubRoutine)
{
    JSGlobalData* globalData = &exec->globalData();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.dfg.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.dfg.valueGPR);
    GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.patch.dfg.scratchGPR);
    bool needToRestoreScratch = false;

    ASSERT(scratchGPR != baseGPR);

    MacroAssembler stubJit;

    MacroAssembler::JumpList failureCases;

    if (scratchGPR == InvalidGPRReg) {
        scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
        stubJit.push(scratchGPR);
        needToRestoreScratch = true;
    }

    failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));

    testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
            testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
    }

#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
    // Must always emit this write barrier as the structure transition itself requires it
    GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
    stubJit.push(scratch2);
    SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
    stubJit.pop(scratch2);
#endif

    stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
#if USE(JSVALUE64)
    if (structure->isUsingInlineStorage())
        stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (structure->isUsingInlineStorage()) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (needToRestoreScratch) {
        stubJit.pop(scratchGPR);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        stubJit.pop(scratchGPR);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToDone));
    if (needToRestoreScratch)
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);

    stubRoutine = patchBuffer.finalizeCode();
}

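// Try to cache a put_by_id as either a structure transition (new property)
// or an in-place replace (existing property). Returns false if the access
// cannot be cached.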
static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // skip optimizing the case where we need a realloc
            if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity())
                return false;

            normalizePrototypeChain(exec, baseCell);

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return true;
        }

        dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
        return true;
    }

    return false;
}

void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

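// Grow a polymorphic list of put_by_id stubs (transitions and replaces) once
// the monomorphic cache has been exhausted.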
static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    JSGlobalData* globalData = &exec->globalData();

    if (!baseValue.isCell())
        return false;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheable())
        return false;
    if (structure->isUncacheableDictionary())
        return false;

    // Optimize self access.
    if (slot.base() == baseValue) {
        PolymorphicPutByIdList* list;
        MacroAssemblerCodeRef stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return false;

            // skip optimizing the case where we need a realloc
            if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity())
                return false;

            normalizePrototypeChain(exec, baseCell);

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);

            list->addAccess(
                PutByIdAccess::transition(
                    *globalData, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            list = PolymorphicPutByIdList::from(
                putKind, stubInfo,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));

            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);

            list->addAccess(
                PutByIdAccess::replace(
                    *globalData, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), CodeLocationLabel(stubRoutine.code()));

        if (list->isFull())
            repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return true;
    }

    return false;
}

void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
    if (!cached)
        dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

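// Link a call site to a known callee. The slow-path call is relinked to the
// virtual call (or construct) operation so a later callee mismatch can be
// handled and the site relinked.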
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->globalData(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->globalData(), callerCodeBlock->ownerExecutable(), callee);
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(&callLinkInfo);

    if (kind == CodeForCall) {
        repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualCall);
        return;
    }
    ASSERT(kind == CodeForConstruct);
    repatchBuffer.relink(CodeLocationCall(callLinkInfo.callReturnLocation), operationVirtualConstruct);
}

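// Tear a get_by_id inline cache back down to its unoptimized state so that
// it can be patched afresh.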
void dfgResetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation, operationGetByIdOptimize);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

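// Tear a put_by_id inline cache back down, choosing the optimizing slow path
// that corresponds to whichever generic operation is currently linked.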
void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
    V_DFGOperation_EJCI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelPtrAtOffset(-(uintptr_t)stubInfo.patch.dfg.deltaCheckImmToCall), reinterpret_cast<void*>(-1));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.dfg.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.dfg.deltaCallToStructCheck), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.dfg.deltaCallToSlowCase));
}

} } // namespace JSC::DFG

#endif