]> git.saurik.com Git - apple/javascriptcore.git/blob - jit/JITPropertyAccess.cpp
JavaScriptCore-521.tar.gz
[apple/javascriptcore.git] / jit / JITPropertyAccess.cpp
1 /*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26 #include "config.h"
27 #include "JIT.h"
28
29 #if ENABLE(JIT)
30
31 #include "CodeBlock.h"
32 #include "JITInlineMethods.h"
33 #include "JSArray.h"
34 #include "JSFunction.h"
35 #include "Interpreter.h"
36 #include "ResultType.h"
37 #include "SamplingTool.h"
38
39 #ifndef NDEBUG
40 #include <stdio.h>
41 #endif
42
43 using namespace std;
44
45 namespace JSC {
46
47 #if !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
48
void JIT::compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier* ident, unsigned)
{
    // Non-patching fallback used when JIT_OPTIMIZE_PROPERTY_ACCESS is disabled:
    // every get_by_id calls straight out to the generic C stub, so no inline
    // Structure check is planted and nothing needs to be patched later.

    emitGetVirtualRegister(baseVReg, X86::eax);

    emitPutJITStubArg(X86::eax, 1);      // stub arg 1: base object
    emitPutJITStubArgConstant(ident, 2); // stub arg 2: property name
    emitCTICall(Interpreter::cti_op_get_by_id_generic);
    emitPutVirtualRegister(resultVReg);  // store the stub's return value into the result register
}
63
64
// The generic (non-optimizing) hot path never adds a slow case, so this
// slow-case generator must never be invoked in this configuration.
void JIT::compileGetByIdSlowCase(int, int, Identifier*, Vector<SlowCaseEntry>::iterator&, unsigned)
{
    ASSERT_NOT_REACHED();
}
69
void JIT::compilePutByIdHotPath(int baseVReg, Identifier* ident, int valueVReg, unsigned)
{
    // Non-patching fallback used when JIT_OPTIMIZE_PROPERTY_ACCESS is disabled:
    // every put_by_id calls straight out to the generic C stub; no Structure
    // check or patchable store is emitted.

    emitGetVirtualRegisters(baseVReg, X86::eax, valueVReg, X86::edx);

    emitPutJITStubArgConstant(ident, 2); // stub arg 2: property name
    emitPutJITStubArg(X86::eax, 1);      // stub arg 1: base object
    emitPutJITStubArg(X86::edx, 3);      // stub arg 3: value to store
    emitCTICall(Interpreter::cti_op_put_by_id_generic);
}
83
// The generic (non-optimizing) hot path never adds a slow case, so this
// slow-case generator must never be invoked in this configuration.
void JIT::compilePutByIdSlowCase(int, Identifier*, int, Vector<SlowCaseEntry>::iterator&, unsigned)
{
    ASSERT_NOT_REACHED();
}
88
89 #else
90
void JIT::compileGetByIdHotPath(int resultVReg, int baseVReg, Identifier*, unsigned propertyAccessInstructionIndex)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of those trampolines finds a match.
    //
    // NOTE: the instruction sequence below is layout-sensitive — the ASSERTs pin
    // each patchable location to a fixed distance from hotPathBegin, which the
    // patching code (patchGetByIdSelf etc.) relies on. Do not reorder.

    emitGetVirtualRegister(baseVReg, X86::eax);

    // Immediates (non-cells) cannot be cached; bail to the slow case.
    emitJumpSlowCaseIfNotJSCell(X86::eax, baseVReg);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;

    // Compare the cell's Structure against a placeholder; the real Structure is patched in later.
    DataLabelPtr structureToCompare;
    Jump structureCheck = jnePtrWithPatch(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);
    ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetGetByIdStructure);
    ASSERT(differenceBetween(hotPathBegin, structureCheck) == patchOffsetGetByIdBranchToSlowCase);

    // Load from a placeholder offset in the property storage; the real offset is patched in later.
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    DataLabel32 displacementLabel = loadPtrWithAddressOffsetPatch(Address(X86::eax, patchGetByIdDefaultOffset), X86::eax);
    ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetGetByIdPropertyMapOffset);

    // Stub routines jump back here on success to reuse the store-to-destination code.
    Label putResult(this);
    ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
    emitPutVirtualRegister(resultVReg);
}
119
120
void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

#ifndef NDEBUG
    // Only needed for the differenceBetween() ASSERT below.
    Label coldPathBegin(this);
#endif
    emitPutJITStubArg(X86::eax, 1);      // stub arg 1: base object
    emitPutJITStubArgConstant(ident, 2); // stub arg 2: property name
    Jump call = emitCTICall(Interpreter::cti_op_get_by_id);
    emitPutVirtualRegister(resultVReg);

    ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
}
145
void JIT::compilePutByIdHotPath(int baseVReg, Identifier*, int valueVReg, unsigned propertyAccessInstructionIndex)
{
    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.

    emitGetVirtualRegisters(baseVReg, X86::eax, valueVReg, X86::edx);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(X86::eax, baseVReg);

    Label hotPathBegin(this);
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;

    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(jnePtrWithPatch(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
    ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetPutByIdStructure);

    // Plant a store to a bogus offset in the object's property map; we will patch this later, if it is to be used.
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    DataLabel32 displacementLabel = storePtrWithAddressOffsetPatch(X86::edx, Address(X86::eax, patchGetByIdDefaultOffset));
    ASSERT(differenceBetween(hotPathBegin, displacementLabel) == patchOffsetPutByIdPropertyMapOffset);
}
170
// Slow case for put_by_id: taken when the base is not a cell or the Structure
// check fails. Calls out to the C stub and records the call site so the stub
// can later locate and patch the hot path.
void JIT::compilePutByIdSlowCase(int baseVReg, Identifier* ident, int, Vector<SlowCaseEntry>::iterator& iter, unsigned propertyAccessInstructionIndex)
{
    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    emitPutJITStubArgConstant(ident, 2); // stub arg 2: property name
    emitPutJITStubArg(X86::eax, 1);      // stub arg 1: base object
    emitPutJITStubArg(X86::edx, 3);      // stub arg 3: value to store
    Jump call = emitCTICall(Interpreter::cti_op_put_by_id);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[propertyAccessInstructionIndex].callReturnLocation = call;
}
184
// C thunk called from JIT-generated transition stubs (see
// privateCompilePutByIdTransition) to grow an object's out-of-line property
// storage. Returns the base object so the generated code gets it back in the
// return register.
static JSObject* resizePropertyStorage(JSObject* baseObject, int32_t oldSize, int32_t newSize)
{
    baseObject->allocatePropertyStorage(oldSize, newSize);
    return baseObject;
}
190
// A structure transition only requires reallocating the property storage when
// the storage capacity differs between the old and new Structure.
static inline bool transitionWillNeedStorageRealloc(Structure* oldStructure, Structure* newStructure)
{
    return oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
}
195
// Compiles a stub for a put_by_id that transitions the object from oldStructure
// to newStructure (i.e. adds a new property). The stub verifies the base
// object's Structure and walks the prototype chain checking each Structure is
// unchanged, reallocates property storage if required, swaps in the new
// Structure, and stores the value. On any check failure it falls back to
// cti_op_put_by_id_fail.
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, void* returnAddress)
{
    JumpList failureCases;
    // Check eax is an object of the right Structure.
    failureCases.append(emitJumpIfNotJSCell(X86::eax));
    failureCases.append(jnePtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), ImmPtr(oldStructure)));
    JumpList successCases;

    // ecx = baseObject
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
    // proto(ecx) = baseObject->structure()->prototype()
    failureCases.append(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));

    loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);

    // Walk the prototype chain, verifying each prototype's Structure is still
    // the one recorded in the chain; reaching null terminates successfully.
    // ecx = baseObject->m_structure
    for (RefPtr<Structure>* it = chain->head(); *it; ++it) {
        // null check the prototype
        successCases.append(jePtr(X86::ecx, ImmPtr(JSValuePtr::encode(jsNull()))));

        // Check the structure id
        failureCases.append(jnePtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), ImmPtr(it->get())));

        loadPtr(Address(X86::ecx, FIELD_OFFSET(JSCell, m_structure)), X86::ecx);
        failureCases.append(jne32(Address(X86::ecx, FIELD_OFFSET(Structure, m_typeInfo) + FIELD_OFFSET(TypeInfo, m_type)), Imm32(ObjectType)));
        loadPtr(Address(X86::ecx, FIELD_OFFSET(Structure, m_prototype)), X86::ecx);
    }

    successCases.link(this);

    Jump callTarget;

    // emit a call only if storage realloc is needed
    if (transitionWillNeedStorageRealloc(oldStructure, newStructure)) {
        // Preserve the return address across the C call to resizePropertyStorage.
        pop(X86::ebx);
#if PLATFORM(X86_64)
        // Arguments go in registers per the x86-64 calling convention.
        move(Imm32(newStructure->propertyStorageCapacity()), X86::edx);
        move(Imm32(oldStructure->propertyStorageCapacity()), X86::esi);
        move(X86::eax, X86::edi);
        callTarget = call();
#else
        // 32-bit: arguments are pushed on the stack and popped after the call.
        push(Imm32(newStructure->propertyStorageCapacity()));
        push(Imm32(oldStructure->propertyStorageCapacity()));
        push(X86::eax);
        callTarget = call();
        addPtr(Imm32(3 * sizeof(void*)), X86::esp);
#endif
        // The call clobbered edx; reload the value to store, and restore the return address.
        emitGetJITStubArg(3, X86::edx);
        push(X86::ebx);
    }

    // Assumes m_refCount can be decremented easily, refcount decrement is safe as
    // codeblock should ensure oldStructure->m_refCount > 0
    sub32(Imm32(1), AbsoluteAddress(oldStructure->addressOfCount()));
    add32(Imm32(1), AbsoluteAddress(newStructure->addressOfCount()));
    storePtr(ImmPtr(newStructure), Address(X86::eax, FIELD_OFFSET(JSCell, m_structure)));

    // write the value
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    storePtr(X86::edx, Address(X86::eax, cachedOffset * sizeof(JSValuePtr)));

    ret();

    Jump failureJump;
    bool plantedFailureJump = false;
    if (!failureCases.empty()) {
        failureCases.link(this);
        restoreArgumentReferenceForTrampoline();
        failureJump = jump();
        plantedFailureJump = true;
    }

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    if (plantedFailureJump)
        patchBuffer.link(failureJump, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));

    if (transitionWillNeedStorageRealloc(oldStructure, newStructure))
        patchBuffer.link(callTarget, reinterpret_cast<void*>(resizePropertyStorage));

    stubInfo->stubRoutine = code;

    // Redirect the original call site to the freshly generated stub.
    Jump::patch(returnAddress, code);
}
281
// Patches an inline get_by_id cache in place for a self (own-property) access:
// writes the real Structure and property offset into the hot path emitted by
// compileGetByIdHotPath, using the fixed patchOffset* distances asserted there.
void JIT::patchGetByIdSelf(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to Interpreter::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    void* structureAddress = reinterpret_cast<void*>(reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdStructure);
    void* displacementAddress = reinterpret_cast<void*>(reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPropertyMapOffset);
    DataLabelPtr::patch(structureAddress, structure);
    DataLabel32::patch(displacementAddress, cachedOffset * sizeof(JSValuePtr));
}
294
// Patches an inline put_by_id cache in place for a replace (existing-property)
// store: writes the real Structure and property offset into the hot path
// emitted by compilePutByIdHotPath.
void JIT::patchPutByIdReplace(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to Interpreter::cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_generic));

    // Patch the offset into the property map to store to, then patch the Structure to look for.
    void* structureAddress = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetPutByIdStructure;
    void* displacementAddress = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetPutByIdPropertyMapOffset;
    DataLabelPtr::patch(structureAddress, structure);
    DataLabel32::patch(displacementAddress, cachedOffset * sizeof(JSValuePtr));
}
307
// Compiles a stub for get_by_id "length" on a JSArray: checks the vptr
// identifies an array, loads the length from the array's storage, and boxes it
// as an immediate integer. Failures bail back into the original slow case.
void JIT::privateCompilePatchGetArrayLength(void* returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_array_fail));

    // Check eax is an array
    Jump failureCases1 = jnePtr(Address(X86::eax), ImmPtr(m_interpreter->m_jsArrayVptr));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSArray, m_storage)), X86::ecx);
    load32(Address(X86::ecx, FIELD_OFFSET(ArrayStorage, m_length)), X86::ecx);

    // Lengths too large to box as an immediate integer take the slow path.
    Jump failureCases2 = ja32(X86::ecx, Imm32(JSImmediate::maxImmediateInt));

    emitFastArithIntToImmNoCheck(X86::ecx, X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    void* hotPathPutResult = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, hotPathPutResult);

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = code;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}
346
// Compiles a standalone stub for a self (own-property) get_by_id: Structure
// check, then a direct load from the object's property storage at the cached
// offset. Failures call through to cti_op_get_by_id_self_fail.
void JIT::privateCompileGetByIdSelf(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // Check eax is an object of the right Structure.
    Jump failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump failureCases2 = checkStructure(X86::eax, structure);

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    loadPtr(Address(X86::eax, cachedOffset * sizeof(JSValuePtr)), X86::eax);
    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));
    patchBuffer.link(failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_self_fail));

    stubInfo->stubRoutine = code;

    // Redirect the original call site to the freshly generated stub.
    Jump::patch(returnAddress, code);
}
368
// Compiles a stub for a get_by_id that finds the property on the direct
// prototype: checks the base object's Structure and the prototype's Structure,
// then loads from the prototype's property storage at the cached offset.
// With CTI_REPATCH_PIC the stub is spliced into the existing inline cache;
// otherwise it is a standalone routine reached via the patched call site.
void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, void* returnAddress, CallFrame* callFrame)
{
#if USE(CTI_REPATCH_PIC)
    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_list));

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(static_cast<void*>(protoPropertyStorage), X86::edx);

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure had not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
    // x86-64 cannot compare a 64-bit immediate with memory directly; stage it in a register.
    move(ImmPtr(prototypeStructure), X86::ebx);
    Jump failureCases2 = jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress));
#else
    Jump failureCases2 = jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
#endif

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValuePtr)), X86::eax);

    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* slowCaseBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = code;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
#else
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);

    // Check eax is an object of the right Structure.
    Jump failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump failureCases2 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure had not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
    Jump failureCases3 = jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValuePtr)), X86::eax);

    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
    patchBuffer.link(failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
    patchBuffer.link(failureCases3, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));

    stubInfo->stubRoutine = code;

    // Redirect the original call site to the freshly generated stub.
    Jump::patch(returnAddress, code);
#endif
}
448
449 #if USE(CTI_REPATCH_PIC)
// Adds one self-access case to a polymorphic get_by_id inline cache. The new
// stub's failure path chains to the previously generated stub (or to the
// original slow case if this is the first entry in the list).
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
{
    Jump failureCase = checkStructure(X86::eax, structure);
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    loadPtr(Address(X86::eax, cachedOffset * sizeof(JSValuePtr)), X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    ASSERT(code);
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
    if (!lastProtoBegin)
        lastProtoBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // The list entry takes ownership of a reference to the Structure.
    structure->ref();
    polymorphicStructures->list[currentIndex].set(code, structure);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}
479
// Adds one direct-prototype case to a polymorphic get_by_id inline cache:
// checks base and prototype Structures, loads from the prototype's storage.
// Failures chain to the previously generated stub in the list.
void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(X86::eax, structure);

    // Check the prototype object's Structure had not changed.
    Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
    // x86-64 cannot compare a 64-bit immediate with memory directly; stage it in a register.
    move(ImmPtr(prototypeStructure), X86::ebx);
    Jump failureCases2 = jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress));
#else
    Jump failureCases2 = jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
#endif

    // Checks out okay! - getDirectOffset
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValuePtr)), X86::eax);

    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
    patchBuffer.link(failureCases1, lastProtoBegin);
    patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // The list entry takes ownership of references to both Structures.
    structure->ref();
    prototypeStructure->ref();
    prototypeStructures->list[currentIndex].set(code, structure, prototypeStructure);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}
525
// Adds one prototype-chain case (property found 'count' hops up the chain) to
// a polymorphic get_by_id inline cache. Each prototype's Structure is checked
// against the recorded chain; failures fall through to the previous stub.
void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(X86::eax, structure);
    bucketsOfFail.append(baseObjectCheck);

    Structure* currStructure = structure;
    RefPtr<Structure>* chainEntries = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = chainEntries[i].get();

        // Check the prototype object's Structure had not changed.
        Structure** prototypeStructureAddress = &(protoObject->m_structure);
#if PLATFORM(X86_64)
        // x86-64 cannot compare a 64-bit immediate with memory directly; stage it in a register.
        move(ImmPtr(currStructure), X86::ebx);
        bucketsOfFail.append(jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress)));
#else
        bucketsOfFail.append(jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
#endif
    }
    ASSERT(protoObject);

    // Load from the final prototype's property storage at the cached offset.
    PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
    loadPtr(protoPropertyStorage, X86::edx);
    loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValuePtr)), X86::eax);
    Jump success = jump();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    // Use the patch information to link the failure cases back to the original slow case routine.
    void* lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
    patchBuffer.link(success, reinterpret_cast<void*>(successDest));

    // Track the stub we have created so that it will be deleted later.
    structure->ref();
    chain->ref();
    prototypeStructures->list[currentIndex].set(code, structure, chain);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
    Jump::patch(jumpLocation, code);
}
580 #endif
581
582 void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, void* returnAddress, CallFrame* callFrame)
583 {
584 #if USE(CTI_REPATCH_PIC)
585 // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
586 Jump::patch(returnAddress, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_list));
587
588 ASSERT(count);
589
590 JumpList bucketsOfFail;
591
592 // Check eax is an object of the right Structure.
593 bucketsOfFail.append(checkStructure(X86::eax, structure));
594
595 Structure* currStructure = structure;
596 RefPtr<Structure>* chainEntries = chain->head();
597 JSObject* protoObject = 0;
598 for (unsigned i = 0; i < count; ++i) {
599 protoObject = asObject(currStructure->prototypeForLookup(callFrame));
600 currStructure = chainEntries[i].get();
601
602 // Check the prototype object's Structure had not changed.
603 Structure** prototypeStructureAddress = &(protoObject->m_structure);
604 #if PLATFORM(X86_64)
605 move(ImmPtr(currStructure), X86::ebx);
606 bucketsOfFail.append(jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress)));
607 #else
608 bucketsOfFail.append(jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
609 #endif
610 }
611 ASSERT(protoObject);
612
613 PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
614 loadPtr(protoPropertyStorage, X86::edx);
615 loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValuePtr)), X86::eax);
616 Jump success = jump();
617
618 void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
619 PatchBuffer patchBuffer(code);
620
621 // Use the patch information to link the failure cases back to the original slow case routine.
622 void* slowCaseBegin = reinterpret_cast<char*>(stubInfo->callReturnLocation) - patchOffsetGetByIdSlowCaseCall;
623
624 patchBuffer.link(bucketsOfFail, slowCaseBegin);
625
626 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
627 intptr_t successDest = reinterpret_cast<intptr_t>(stubInfo->hotPathBegin) + patchOffsetGetByIdPutResult;
628 patchBuffer.link(success, reinterpret_cast<void*>(successDest));
629
630 // Track the stub we have created so that it will be deleted later.
631 stubInfo->stubRoutine = code;
632
633 // Finally patch the jump to slow case back in the hot path to jump here instead.
634 void* jumpLocation = reinterpret_cast<char*>(stubInfo->hotPathBegin) + patchOffsetGetByIdBranchToSlowCase;
635 Jump::patch(jumpLocation, code);
636 #else
637 ASSERT(count);
638
639 JumpList bucketsOfFail;
640
641 // Check eax is an object of the right Structure.
642 bucketsOfFail.append(emitJumpIfNotJSCell(X86::eax));
643 bucketsOfFail.append(checkStructure(X86::eax, structure));
644
645 Structure* currStructure = structure;
646 RefPtr<Structure>* chainEntries = chain->head();
647 JSObject* protoObject = 0;
648 for (unsigned i = 0; i < count; ++i) {
649 protoObject = asObject(currStructure->prototypeForLookup(callFrame));
650 currStructure = chainEntries[i].get();
651
652 // Check the prototype object's Structure had not changed.
653 Structure** prototypeStructureAddress = &(protoObject->m_structure);
654 #if PLATFORM(X86_64)
655 move(ImmPtr(currStructure), X86::ebx);
656 bucketsOfFail.append(jnePtr(X86::ebx, AbsoluteAddress(prototypeStructureAddress)));
657 #else
658 bucketsOfFail.append(jnePtr(AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
659 #endif
660 }
661 ASSERT(protoObject);
662
663 PropertyStorage* protoPropertyStorage = &protoObject->m_propertyStorage;
664 loadPtr(protoPropertyStorage, X86::edx);
665 loadPtr(Address(X86::edx, cachedOffset * sizeof(JSValuePtr)), X86::eax);
666 ret();
667
668 void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
669
670 patchBuffer.link(bucketsOfFail, reinterpret_cast<void*>(Interpreter::cti_op_get_by_id_proto_fail));
671
672 stubInfo->stubRoutine = code;
673
674 Jump::patch(returnAddress, code);
675 #endif
676 }
677
// Compiles a standalone stub for a put_by_id that replaces an existing
// property: Structure check, then a direct store into the object's property
// storage at the cached offset. Failures call through to cti_op_put_by_id_fail.
void JIT::privateCompilePutByIdReplace(StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, void* returnAddress)
{
    // Check eax is an object of the right Structure.
    Jump failureCases1 = emitJumpIfNotJSCell(X86::eax);
    Jump failureCases2 = checkStructure(X86::eax, structure);

    // checks out okay! - putDirectOffset
    loadPtr(Address(X86::eax, FIELD_OFFSET(JSObject, m_propertyStorage)), X86::eax);
    storePtr(X86::edx, Address(X86::eax, cachedOffset * sizeof(JSValuePtr)));
    ret();

    void* code = m_assembler.executableCopy(m_codeBlock->executablePool());
    PatchBuffer patchBuffer(code);

    patchBuffer.link(failureCases1, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));
    patchBuffer.link(failureCases2, reinterpret_cast<void*>(Interpreter::cti_op_put_by_id_fail));

    stubInfo->stubRoutine = code;

    // Redirect the original call site to the freshly generated stub.
    Jump::patch(returnAddress, code);
}
699
700 #endif
701
702 } // namespace JSC
703
704 #endif // ENABLE(JIT)