/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include <wtf/StringPrintStream.h>

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

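// op_put_by_index, op_put_getter_setter and op_del_by_id have no inline fast
// path in the baseline JIT; each simply marshals its operands and calls the
// corresponding C++ runtime stub.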
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned getter = currentInstruction[3].u.operand;
    unsigned setter = currentInstruction[4].u.operand;

    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(getter);
    stubCall.addArgument(setter);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

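// Shared thunk used by get_by_val when the base turns out to be a JSString:
// it fetches the character at the requested index (handling both 8-bit and
// 16-bit string representations) and returns the matching entry from the
// VM's single-character string cache. On any failure it returns a null
// payload so the caller can fall through to the generic slow path.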
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get())));

    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT1);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

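// get_by_val fast path. After checking that the property is an int32 and the
// base is a cell, the array profile chooses a single indexing shape (Int32,
// Double, Contiguous, or ArrayStorage) to compile inline; the patchable
// badType jump and the recorded ByValCompilationInfo let the by-val
// repatching machinery swap in a different shape later if the profile turns
// out to be wrong.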
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    emitArrayProfilingSite(regT1, regT3, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

#if !ASSERT_DISABLED
    Jump resultOK = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    breakpoint();
    resultOK.link(this);
#endif

    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

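// Each helper below emits the body of the get_by_val fast path for one
// indexing shape: it sets badType to the patchable shape check, leaves the
// result in regT1:regT0 (tag:payload), and returns the remaining slow cases
// (bounds checks and hole checks) for the caller to link.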
JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(expectedShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));

    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    return slowCases;
}

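// A hole in DoubleShape storage reads back as NaN, so the self-comparison
// below (DoubleNotEqualOrUnordered against itself) doubles as the hole check.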
JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(DoubleShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));

    loadDouble(BaseIndex(regT3, regT2, TimesEight), fpRegT0);
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
    moveDoubleToInts(fpRegT0, regT0, regT1);

    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    add32(TrustedImm32(-ArrayStorageShape), regT1, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    return slowCases;
}

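// Slow path for get_by_val. The slow-case entries must be linked in the same
// order the fast path added them: property int32 check, base cell check, bad
// array type (where we also try the string thunk before giving up), then the
// shape-specific bounds and hole checks.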
void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    Call call = stubCall.call(dst);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

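// put_by_val mirrors get_by_val: the array profile picks one indexing shape
// to compile inline, and the patchable badType jump plus the recorded
// ByValCompilationInfo allow the stub to be regenerated for other shapes.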
void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    emitArrayProfilingSite(regT1, regT3, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

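// Generic contiguous put_by_val, shared by the Int32, Double and Contiguous
// shapes. An in-bounds store writes directly; a store past the public length
// that still fits in the allocated vector grows the public length (and is
// recorded in the array profile as a store to a hole); anything beyond the
// vector length is a slow case.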
JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitLoad(value, regT1, regT0);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        // Fall through.
    case ContiguousShape:
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        break;
    case DoubleShape: {
        Jump notInt = branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag));
        convertInt32ToDouble(regT0, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
        break;
    }
    default:
        CRASH();
        break;
    }

    Jump done = jump();

    outOfBounds.link(this);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT2, regT1);
    store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    return slowCases;
}

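// ArrayStorage put_by_val: stores inside the vector length are fast. Writing
// into a hole bumps m_numValuesInVector, and if the index is at or past the
// current length, the length is updated too before the store is retried.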
JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();

    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
    jump().linkTo(storeResult, this);

    end.link(this);

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    return slowCases;
}

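// Slow path for put_by_val. The slow cases link in fast-path order; the
// Int32 and Double shapes contribute one extra value-type check entry.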
void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    Call call = stubPutByValCall.call();

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

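// get_by_id uses a self-modifying inline cache: the hot path plants a
// patchable structure check and two patchable loads (payload and tag) that
// start out wired to dummy values, and the repatching code later in this
// file rewrites them once the runtime has observed a real structure.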
void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath(ident);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath(Identifier* ident)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of these trampolines finds a match.

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, m_bytecodeOffset);
    }

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);

    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);

    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag

    Label putResult(this);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel1, displacementLabel2, putResult));
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    Label coldPathBegin(this);
    JITStubCall stubCall(this, cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);

    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad2(base, regT1, regT0, value, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);

    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));

    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel1, displacementLabel2));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

// Compile a store into an object's property storage. May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}

// Compile a load from an object's property storage. May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }

    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::butterflyOffset()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }

    loadPtr(base->butterflyAddress(), resultTag);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

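// Generates a one-off stub for a put_by_id that transitions the object's
// structure: it re-checks the old structure and (for non-direct puts) walks
// the prototype chain to verify nothing intercepts the property, reallocates
// out-of-line storage if the capacity changed, installs the new structure,
// and finally stores the value at the cached offset.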
void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases, stubInfo);

    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases, stubInfo);
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif

    // Reallocate property storage if needed.
    Call callTarget;
    bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM we don't add sizeof(void*) to the stack offset.
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline put_by_id transition stub for %s, return point %p",
                toCString(*m_codeBlock).data(), returnAddress.value())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        willNeedStorageRealloc,
        newStructure);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

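// Builds the out-of-line stub the get_by_id inline cache jumps to when the
// base is a JSArray and the property is 'length': it validates the indexing
// type, loads the length from the butterfly, and returns to the hot path's
// putResult label on success.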
void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // regT0 holds a JSCell*

    // Check for array
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump failureCases1 = branchTest32(Zero, regT2, TrustedImm32(IsArray));
    Jump failureCases2 = branchTest32(Zero, regT2, TrustedImm32(IndexingShapeMask));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::butterflyOffset()), regT2);
    load32(Address(regT2, ArrayStorage::lengthOffset()), regT2);

    Jump failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    patchBuffer.link(failureCases3, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("Baseline get_by_id array length stub for %s, return point %p",
            toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                stubInfo->patch.baseline.u.get.putResult).executableAddress()));

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);

    bool needsStubLink = false;
    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    if (failureCases2.isSet())
        patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id proto stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

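// Extends a polymorphic self-access list with a stub for one more structure.
// The new stub's failure path chains to the previously generated stub, or to
// the slow case for the first entry in the list.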
void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(polymorphicStructures->list[currentIndex - 1].stubRoutine));
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id self list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    polymorphicStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check that the base object has the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
    patchBuffer.link(failureCases1, lastProtoBegin);
    if (failureCases2.isSet())
        patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id proto list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that the base object has the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail, stubInfo);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id chain list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

93a37866 1097void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
4e4e5a6f
A
1098{
1099 // regT0 holds a JSCell*
1100 ASSERT(count);
1101
1102 JumpList bucketsOfFail;
1103
1104 // Check eax is an object of the right Structure.
1105 bucketsOfFail.append(checkStructure(regT0, structure));
1106
1107 Structure* currStructure = structure;
14957cd0 1108 WriteBarrier<Structure>* it = chain->head();
4e4e5a6f 1109 JSObject* protoObject = 0;
14957cd0 1110 for (unsigned i = 0; i < count; ++i, ++it) {
4e4e5a6f 1111 protoObject = asObject(currStructure->prototypeForLookup(callFrame));
14957cd0 1112 currStructure = it->get();
93a37866 1113 testPrototype(protoObject, bucketsOfFail, stubInfo);
4e4e5a6f
A
1114 }
1115 ASSERT(protoObject);
1116
1117 bool needsStubLink = false;
1118 if (slot.cachedPropertyType() == PropertySlot::Getter) {
1119 needsStubLink = true;
14957cd0 1120 compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
4e4e5a6f
A
1121 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
1122 stubCall.addArgument(regT1);
1123 stubCall.addArgument(regT0);
14957cd0 1124 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
4e4e5a6f
A
1125 stubCall.call();
1126 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
1127 needsStubLink = true;
1128 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
14957cd0
A
1129 stubCall.addArgument(TrustedImmPtr(protoObject));
1130 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
1131 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
1132 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
4e4e5a6f
A
1133 stubCall.call();
1134 } else
14957cd0 1135 compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
4e4e5a6f
A
1136 Jump success = jump();
1137
93a37866 1138 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
4e4e5a6f
A
1139 if (needsStubLink) {
1140 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
1141 if (iter->to)
1142 patchBuffer.link(iter->from, FunctionPtr(iter->to));
1143 }
1144 }
1145 // Use the patch information to link the failure cases back to the original slow case routine.
6fe7ccc8 1146 patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
4e4e5a6f
A
1147
1148 // On success, return to the hot path code, at the point where it will perform the store to dest for us.
6fe7ccc8 1149 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
4e4e5a6f
A
1150
1151 // Track the stub we have created so that it will be deleted later.
93a37866
A
1152 RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
1153 FINALIZE_CODE(
1154 patchBuffer,
1155 ("Baseline get_by_id chain stub for %s, return point %p",
1156 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
1157 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
1158 *m_vm,
1159 m_codeBlock->ownerExecutable(),
1160 needsStubLink);
6fe7ccc8 1161 stubInfo->stubRoutine = stubRoutine;
4e4e5a6f
A
1162
1163 // Finally, repatch the jump to the slow case in the hot path so that it jumps here instead.
6fe7ccc8 1164 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
4e4e5a6f 1165 RepatchBuffer repatchBuffer(m_codeBlock);
93a37866 1166 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
4e4e5a6f
A
1167
1168 // We don't want to patch more than once - from now on, route misses through cti_op_get_by_id_proto_list.
1169 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
1170}
1171
93a37866 1172void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
4e4e5a6f 1173{
4e4e5a6f
A
1174 ASSERT(sizeof(JSValue) == 8);
1175
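// A sketch of the addressing scheme below, since the bias constants are easy
// to misread: each JSValue is an 8-byte tag/payload pair, hence TimesEight.
// Out-of-line properties live at negative indices below the butterfly
// pointer, so the offset register is negated before the shared loads at the
// bottom, which add a fixed bias of
// (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue). In the inline case,
// the addPtr subtracts that same bias from the base up front, so the bias
// cancels and the loads resolve to
// base + JSObject::offsetOfInlineStorage() + offset * sizeof(EncodedJSValue).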
93a37866
A
1176 if (finalObjectMode == MayBeFinal) {
1177 Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
1178 loadPtr(Address(base, JSObject::butterflyOffset()), base);
1179 neg32(offset);
1180 Jump done = jump();
1181 isInline.link(this);
1182 addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base);
1183 done.link(this);
1184 } else {
1185#if !ASSERT_DISABLED
1186 Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
1187 breakpoint();
1188 isOutOfLine.link(this);
1189#endif
1190 loadPtr(Address(base, JSObject::butterflyOffset()), base);
1191 neg32(offset);
1192 }
1193 load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
1194 load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
4e4e5a6f
A
1195}
1196
1197void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
1198{
1199 unsigned dst = currentInstruction[1].u.operand;
1200 unsigned base = currentInstruction[2].u.operand;
1201 unsigned property = currentInstruction[3].u.operand;
1202 unsigned expected = currentInstruction[4].u.operand;
1203 unsigned iter = currentInstruction[5].u.operand;
1204 unsigned i = currentInstruction[6].u.operand;
1205
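// Fast path for base[property] inside a for-in loop: the property must still
// be the expected string, the base must be a cell whose Structure matches the
// iterator's cached Structure, and the loop index must fall within the
// cacheable slots; anything else drops to the generic get_by_val slow case.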
1206 emitLoad2(property, regT1, regT0, base, regT3, regT2);
1207 emitJumpSlowCaseIfNotJSCell(property, regT1);
1208 addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
1209 // The property registers are now free for reuse, since the property is known to match the expected value.
1210 emitJumpSlowCaseIfNotJSCell(base, regT3);
1211 emitLoadPayload(iter, regT1);
1212
1213 // Test base's Structure against the iterator's cached Structure.
14957cd0 1214 loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
4e4e5a6f
A
1215 addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
1216 load32(addressFor(i), regT3);
14957cd0 1217 sub32(TrustedImm32(1), regT3);
4e4e5a6f 1218 addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
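// Rebase the for-in slot index into a PropertyOffset: indices below the
// cached inline capacity address inline slots directly; larger indices are
// shifted up into the out-of-line range starting at firstOutOfLineOffset.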
93a37866
A
1219 Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
1220 add32(TrustedImm32(firstOutOfLineOffset), regT3);
1221 sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
1222 inlineProperty.link(this);
14957cd0 1223 compileGetDirectOffset(regT2, regT1, regT0, regT3);
4e4e5a6f
A
1224
1225 emitStore(dst, regT1, regT0);
14957cd0 1226 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
4e4e5a6f
A
1227}
1228
1229void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1230{
1231 unsigned dst = currentInstruction[1].u.operand;
1232 unsigned base = currentInstruction[2].u.operand;
1233 unsigned property = currentInstruction[3].u.operand;
1234
1235 linkSlowCaseIfNotJSCell(iter, property);
1236 linkSlowCase(iter);
1237 linkSlowCaseIfNotJSCell(iter, base);
1238 linkSlowCase(iter);
1239 linkSlowCase(iter);
1240
93a37866 1241 JITStubCall stubCall(this, cti_op_get_by_val_generic);
4e4e5a6f
A
1242 stubCall.addArgument(base);
1243 stubCall.addArgument(property);
1244 stubCall.call(dst);
1245}
1246
6fe7ccc8
A
1247void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
1248{
1249 int dst = currentInstruction[1].u.operand;
1250 int index = currentInstruction[2].u.operand;
1251 int skip = currentInstruction[3].u.operand;
1252
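// Walk `skip` scopes up the chain, then read the variable at `index` out of
// the JSVariableObject's register array. For function code that needs a full
// scope chain, the activation may not have been created yet; its register
// then still holds the empty value tag, and the first hop is skipped.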
93a37866 1253 emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT2);
6fe7ccc8
A
1254 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1255 ASSERT(skip || !checkTopLevel);
1256 if (checkTopLevel && skip--) {
1257 Jump activationNotCreated;
1258 if (checkTopLevel)
1259 activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
93a37866 1260 loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
6fe7ccc8
A
1261 activationNotCreated.link(this);
1262 }
1263 while (skip--)
93a37866 1264 loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
6fe7ccc8 1265
6fe7ccc8
A
1266 loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);
1267
1268 emitLoad(index, regT1, regT0, regT2);
1269 emitValueProfilingSite();
1270 emitStore(dst, regT1, regT0);
1271 map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
1272}
1273
1274void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
1275{
1276 int index = currentInstruction[1].u.operand;
1277 int skip = currentInstruction[2].u.operand;
1278 int value = currentInstruction[3].u.operand;
1279
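// The store-side twin of op_get_scoped_var: the same scope-chain walk, after
// which the value is written into the variable object's register array and a
// write barrier is emitted on the scope object for the stored value.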
1280 emitLoad(value, regT1, regT0);
1281
93a37866 1282 emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT2);
6fe7ccc8
A
1283 bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
1284 ASSERT(skip || !checkTopLevel);
1285 if (checkTopLevel && skip--) {
1286 Jump activationNotCreated;
1287 if (checkTopLevel)
1288 activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
93a37866 1289 loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
6fe7ccc8
A
1290 activationNotCreated.link(this);
1291 }
1292 while (skip--)
93a37866 1293 loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
6fe7ccc8
A
1294
1295 loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
1296 emitStore(index, regT1, regT0, regT3);
1297 emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1298}
1299
93a37866 1300void JIT::emit_op_init_global_const(Instruction* currentInstruction)
6fe7ccc8 1301{
93a37866
A
1302 WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
1303 int value = currentInstruction[2].u.operand;
1304
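// The bytecode carries a direct pointer to the global's register, so the tag
// and payload words are stored straight through it; the write barrier on the
// global object is only emitted when the heap has write barriers enabled.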
6fe7ccc8 1305 JSGlobalObject* globalObject = m_codeBlock->globalObject();
6fe7ccc8 1306
93a37866
A
1307 emitLoad(value, regT1, regT0);
1308
1309 if (Heap::isWriteBarrierEnabled()) {
1310 move(TrustedImmPtr(globalObject), regT2);
1311
1312 emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1313 }
6fe7ccc8 1314
93a37866
A
1315 store32(regT1, registerPointer->tagPointer());
1316 store32(regT0, registerPointer->payloadPointer());
1317 map(m_bytecodeOffset + OPCODE_LENGTH(op_init_global_const), value, regT1, regT0);
6fe7ccc8
A
1318}
1319
93a37866 1320void JIT::emit_op_init_global_const_check(Instruction* currentInstruction)
6fe7ccc8 1321{
93a37866 1322 WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
6fe7ccc8 1323 int value = currentInstruction[2].u.operand;
93a37866 1324
6fe7ccc8 1325 JSGlobalObject* globalObject = m_codeBlock->globalObject();
93a37866 1326
6fe7ccc8 1327 emitLoad(value, regT1, regT0);
93a37866
A
1328
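// Identical to op_init_global_const except for the leading predicate test:
// if the byte at predicatePointer is non-zero, the fast store is abandoned
// and the slow case calls cti_op_init_global_const_check instead.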
1329 addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));
1330
1331 if (Heap::isWriteBarrierEnabled()) {
1332 move(TrustedImmPtr(globalObject), regT2);
1333 emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1334 }
1335
1336 store32(regT1, registerPointer->tagPointer());
1337 store32(regT0, registerPointer->payloadPointer());
1338 unmap();
1339}
6fe7ccc8 1340
93a37866
A
1341void JIT::emitSlow_op_init_global_const_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1342{
1343 linkSlowCase(iter);
1344
1345 JITStubCall stubCall(this, cti_op_init_global_const_check);
1346 stubCall.addArgument(regT1, regT0);
1347 stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
1348 stubCall.call();
6fe7ccc8
A
1349}
1350
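// The two reset routines below return a patched get/put inline cache to its
// pristine state: the IC call is relinked to the generic stub, the inlined
// Structure pointer is repatched to the unused sentinel, the displacement
// labels are zeroed, and (for gets) the structure-check jump is pointed back
// at the cold path.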
1351void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
1352{
1353 repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
93a37866 1354 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(unusedPointer));
6fe7ccc8
A
1355 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
1356 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
1357 repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
1358}
1359
1360void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
1361{
1362 if (isDirectPutById(stubInfo))
1363 repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
1364 else
1365 repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
93a37866 1366 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(unusedPointer));
6fe7ccc8
A
1367 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
1368 repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
1369}
1370
4e4e5a6f
A
1371} // namespace JSC
1372
14957cd0 1373#endif // USE(JSVALUE32_64)
4e4e5a6f 1374#endif // ENABLE(JIT)