/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include <wtf/StringPrintStream.h>

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {
#if USE(JSVALUE64)

JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get())));

    // Load string length to regT2, and start the process of loading the data pointer into regT0.
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large.
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character.
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags.
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

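    // Single-character strings with code points below 0x100 are cached in the
    // VM's small strings table; anything larger takes the slow path.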
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was signed, since m_vectorLength is always less than intmax (because the total allocation
    // size is always less than 4Gb). As such, zero extending will have been correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero extend rather than sign
    // extend since it makes it easier to re-tag the value in the slow case.
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    emitArrayProfilingSite(regT2, regT3, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

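    // Emit a fast path specialized for the one indexing shape the array profile
    // has observed so far; badType is recorded in m_byValCompilationInfo below
    // so the check can be repatched if other shapes turn up at runtime.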
    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

#if !ASSERT_DISABLED
    Jump resultOK = branchTest64(NonZero, regT0);
    breakpoint();
    resultOK.link(this);
#endif

    emitValueProfilingSite();
    emitPutVirtualRegister(dst);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
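    // Holes in a double array are stored as pure NaN, and NaN compares unequal
    // to itself, so this branch sends holes to the slow case. Subtracting the
    // tag constant then re-boxes the raw double bits as an EncodedJSValue.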
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
    moveDoubleTo64(fpRegT0, regT0);
    sub64(tagTypeNumberRegister, regT0);

    return slowCases;
}

JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    load64(BaseIndex(regT2, regT1, TimesEight), regT0);
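    // A zero (empty) slot marks a hole; bail to the slow case so the lookup can
    // consult the prototype chain.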
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

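    // Subtracting ArrayStorageShape turns the two accepted shapes into 0 and 1,
    // so a single unsigned Above check accepts both ArrayStorageShape and
    // SlowPutArrayStorageShape while rejecting everything else.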
    add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTest64(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    Call call = stubCall.call(dst);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);

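    // Property offsets below firstOutOfLineOffset live in the object's inline
    // storage; larger offsets index backwards (hence the negation) into the
    // butterfly's out-of-line storage. Both cases share the final load below.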
    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        addPtr(TrustedImm32(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base, scratch);
        done.link(this);
    } else {
#if !ASSERT_DISABLED
        Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
        breakpoint();
        isOutOfLine.link(this);
#endif
        loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
        neg32(offset);
    }
    signExtend32ToPtr(offset, offset);
    load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
}

void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitGetVirtualRegister(property, regT0);
    addSlowCase(branch64(NotEqual, regT0, addressFor(expected)));
    emitGetVirtualRegisters(base, regT0, iter, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);

    // Test base's structure
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
    Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
    add32(TrustedImm32(firstOutOfLineOffset), regT3);
    sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
    inlineProperty.link(this);
    compileGetDirectOffset(regT0, regT0, regT3, regT1);

    emitPutVirtualRegister(dst, regT0);
}

void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(base, regT2);
    stubCall.addArgument(property, regT2);
    stubCall.call(dst);
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    // See comment in op_get_by_val.
    zeroExtend32ToPtr(regT1, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    emitArrayProfilingSite(regT2, regT3, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));

    emitWriteBarrier(regT0, regT3, regT1, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);
}

JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitGetVirtualRegister(value, regT3);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(emitJumpIfNotImmediateInteger(regT3));
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        break;
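    // For double arrays, an int32 value is converted to a double before the
    // store; a boxed double is unboxed by adding the tag constant, and NaN
    // values go to the slow case so they cannot be confused with the hole
    // representation.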
    case DoubleShape: {
        Jump notInt = emitJumpIfNotImmediateInteger(regT3);
        convertInt32ToDouble(regT3, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        add64(tagTypeNumberRegister, regT3);
        move64ToDouble(regT3, fpRegT0);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
        break;
    }
    case ContiguousShape:
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        break;
    default:
        CRASH();
        break;
    }

    Jump done = jump();
    outOfBounds.link(this);

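    // The index is past the public length but may still be within the allocated
    // vector; if so, this is an append: grow the public length and redo the
    // store. Writes beyond the vector length go to the slow case.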
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT1, regT3);
    store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    return slowCases;
}

JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    store64(regT3, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
    Jump end = jump();

    empty.link(this);
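    // Storing into a hole: bump numValuesInVector, and if the index is at or
    // beyond the array's length, grow the length too before redoing the store.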
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
    branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    return slowCases;
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(regT0);
    stubPutByValCall.addArgument(property, regT2);
    stubPutByValCall.addArgument(value, regT2);
    Call call = stubPutByValCall.call();

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImm32(currentInstruction[2].u.operand));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(currentInstruction[1].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
    stubCall.addArgument(currentInstruction[3].u.operand, regT2);
    stubCall.addArgument(currentInstruction[4].u.operand, regT2);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(currentInstruction[2].u.operand, regT2);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
    stubCall.call(currentInstruction[1].u.operand);
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);
    compileGetByIdHotPath(baseVReg, ident);
    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
}

void JIT::compileGetByIdHotPath(int baseVReg, Identifier* ident)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and finally we also record the property-map access offset as
    // a label to jump back to if one of these trampolines finds a match.

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
        emitArrayProfilingSiteForBytecodeIndex(regT1, regT2, m_bytecodeOffset);
    }

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);

    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);

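    // The load below is emitted as a "convertible" load so repatching can later
    // redirect it at either inline storage or the out-of-line butterfly, and the
    // compact displacement is rewritten to the cached property's offset (see
    // patchGetByIdSelf).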
    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    DataLabelCompact displacementLabel = load64WithCompactAddressOffsetPatch(Address(regT0, patchGetByIdDefaultOffset), regT0);

    Label putResult(this);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel, putResult));
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned resultVReg = currentInstruction[1].u.operand;
    unsigned baseVReg = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    compileGetByIdSlowCase(resultVReg, baseVReg, ident, iter);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int resultVReg, int baseVReg, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture-specific offset
    // so that we only need track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    Label coldPathBegin(this);
    JITStubCall stubCall(this, cti_op_get_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(resultVReg);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    unsigned valueVReg = currentInstruction[3].u.operand;

    // In order to be able to patch both the Structure, and the object offset, we store one pointer,
    // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
    // such that the Structure & offset are always at the same distance from this.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);

    // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));

    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    DataLabel32 displacementLabel = store64WithAddressOffsetPatch(regT1, Address(regT2, patchPutByIdDefaultOffset));

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT1, regT2, regT3, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned baseVReg = currentInstruction[1].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
    unsigned direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    stubCall.addArgument(regT1);
    move(regT0, nonArgGPR1);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

// Compile a store into an object's property storage. May overwrite the
// value in objectReg.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        store64(value, Address(base, JSObject::offsetOfInlineStorage() + sizeof(JSValue) * offsetInInlineStorage(cachedOffset)));
        return;
    }

    loadPtr(Address(base, JSObject::butterflyOffset()), base);
    store64(value, Address(base, sizeof(JSValue) * offsetInButterfly(cachedOffset)));
}

// Compile a load from an object's property storage. May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        load64(Address(base, JSObject::offsetOfInlineStorage() + sizeof(JSValue) * offsetInInlineStorage(cachedOffset)), result);
        return;
    }

    loadPtr(Address(base, JSObject::butterflyOffset()), result);
    load64(Address(result, sizeof(JSValue) * offsetInButterfly(cachedOffset)), result);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        load64(base->locationForOffset(cachedOffset), result);
        return;
    }

    loadPtr(base->butterflyAddress(), result);
    load64(Address(result, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>)), result);
}

void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
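    // The put_by_id slow path stashed the base object in nonArgGPR1 (see
    // emitSlow_op_put_by_id); recover it into regT0 for the checks below.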
    move(nonArgGPR1, regT0);

    JumpList failureCases;
    // Check eax is an object of the right Structure.
    failureCases.append(emitJumpIfNotJSCell(regT0));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));

    testPrototype(oldStructure->storedPrototype(), failureCases, stubInfo);

    ASSERT(oldStructure->storedPrototype().isNull() || oldStructure->storedPrototype().asCell()->structure() == chain->head()->get());

    // ecx = baseObject->m_structure
    if (!direct) {
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
            ASSERT((*it)->storedPrototype().isNull() || (*it)->storedPrototype().asCell()->structure() == it[1].get());
            testPrototype((*it)->storedPrototype(), failureCases, stubInfo);
        }
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif

    // Emit a call only if storage realloc is needed.
    bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);
        emitGetJITStubArg(2, regT1);

        restoreReturnAddressBeforeReturn(regT3);
    }

    // Planting the new structure triggers the write barrier so we need
    // an unconditional barrier here.
    emitWriteBarrier(regT0, regT1, regT2, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    ASSERT(newStructure->classInfo() == oldStructure->classInfo());
    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
    compilePutDirectOffset(regT0, regT1, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline put_by_id transition for %s, return point %p",
                toCString(*m_codeBlock).data(), returnAddress.value())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        willNeedStorageRealloc,
        newStructure);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
    // Should probably go to cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel), offsetRelativeToPatchedStorage(cachedOffset));
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel), offsetRelativeToPatchedStorage(cachedOffset));
}

void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // Check eax is an array
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump failureCases1 = branchTest32(Zero, regT2, TrustedImm32(IsArray));
    Jump failureCases2 = branchTest32(Zero, regT2, TrustedImm32(IndexingShapeMask));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    load32(Address(regT3, ArrayStorage::lengthOffset()), regT2);
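    // A length too large for an int32 reads as negative in this signed compare
    // and cannot be returned as an immediate, so punt it to the slow case.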
    Jump failureCases3 = branch32(LessThan, regT2, TrustedImm32(0));

    emitFastArithIntToImmNoCheck(regT2, regT0);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    patchBuffer.link(failureCases3, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("Baseline JIT get_by_id array length stub for %s, return point %p",
            toCString(*m_codeBlock).data(),
            stubInfo->hotPathBegin.labelAtOffset(
                stubInfo->patch.baseline.u.get.putResult).executableAddress()));

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);

    bool needsStubLink = false;

    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    Jump success = jump();
    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    if (failureCases2.isSet())
        patchBuffer.link(failureCases2, slowCaseBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline JIT get_by_id proto stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));

    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}

void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
{
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(polymorphicStructures->list[currentIndex - 1].stubRoutine));
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubCode = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline JIT get_by_id list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    polymorphicStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubCode, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode->code().code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check eax is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check the prototype object's Structure had not changed.
    Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);

    // Checks out okay!
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
    patchBuffer.link(failureCases1, lastProtoBegin);
    if (failureCases2.isSet())
        patchBuffer.link(failureCases2, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubCode = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline JIT get_by_id proto list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);
    prototypeStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubCode, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubCode->code().code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    ASSERT(count);
    JumpList bucketsOfFail;

    // Check eax is an object of the right Structure.
    Jump baseObjectCheck = checkStructure(regT0, structure);
    bucketsOfFail.append(baseObjectCheck);

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail, stubInfo);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT0, cachedOffset);
    }
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success return back to the hot patch code, at a point it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline JIT get_by_id chain list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

93a37866 1131void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
9dae56ea 1132{
9dae56ea 1133 ASSERT(count);
4e4e5a6f 1134
9dae56ea
A
1135 JumpList bucketsOfFail;
1136
1137 // Check eax is an object of the right Structure.
ba379fdc 1138 bucketsOfFail.append(checkStructure(regT0, structure));
9dae56ea
A
1139
1140 Structure* currStructure = structure;
14957cd0 1141 WriteBarrier<Structure>* it = chain->head();
9dae56ea 1142 JSObject* protoObject = 0;
14957cd0 1143 for (unsigned i = 0; i < count; ++i, ++it) {
9dae56ea 1144 protoObject = asObject(currStructure->prototypeForLookup(callFrame));
14957cd0 1145 currStructure = it->get();
93a37866 1146 testPrototype(protoObject, bucketsOfFail, stubInfo);
9dae56ea
A
1147 }
1148 ASSERT(protoObject);
1149
4e4e5a6f
A
1150 bool needsStubLink = false;
1151 if (slot.cachedPropertyType() == PropertySlot::Getter) {
1152 needsStubLink = true;
14957cd0 1153 compileGetDirectOffset(protoObject, regT1, cachedOffset);
4e4e5a6f
A
1154 JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
1155 stubCall.addArgument(regT1);
1156 stubCall.addArgument(regT0);
14957cd0 1157 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
4e4e5a6f
A
1158 stubCall.call();
1159 } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
1160 needsStubLink = true;
1161 JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
14957cd0
A
1162 stubCall.addArgument(TrustedImmPtr(protoObject));
1163 stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
1164 stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
1165 stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
4e4e5a6f
A
1166 stubCall.call();
1167 } else
14957cd0 1168 compileGetDirectOffset(protoObject, regT0, cachedOffset);
9dae56ea
A
1169 Jump success = jump();
1170
93a37866 1171 LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
9dae56ea 1172
4e4e5a6f
A
1173 if (needsStubLink) {
1174 for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
1175 if (iter->to)
1176 patchBuffer.link(iter->from, FunctionPtr(iter->to));
1177 }
1178 }
1179
9dae56ea 1180 // Use the patch information to link the failure cases back to the original slow case routine.
6fe7ccc8 1181 patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
9dae56ea
A
1182
1183 // On success return back to the hot patch code, at a point it will perform the store to dest for us.
6fe7ccc8 1184 patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));
9dae56ea
A
1185
1186 // Track the stub we have created so that it will be deleted later.
93a37866
A
1187 RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
1188 FINALIZE_CODE(
1189 patchBuffer,
1190 ("Baseline JIT get_by_id chain stub for %s, return point %p",
1191 toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
1192 stubInfo->patch.baseline.u.get.putResult).executableAddress())),
1193 *m_vm,
1194 m_codeBlock->ownerExecutable(),
1195 needsStubLink);
6fe7ccc8 1196 stubInfo->stubRoutine = stubRoutine;
9dae56ea
A
1197
1198 // Finally patch the jump to slow case back in the hot path to jump here instead.
6fe7ccc8 1199 CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
ba379fdc 1200 RepatchBuffer repatchBuffer(m_codeBlock);
93a37866 1201 repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
9dae56ea 1202
ba379fdc
A
1203 // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
1204 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
9dae56ea
A
1205}
1206
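// emit_op_get_scoped_var walks `skip` links up the scope chain and then reads
// a register out of the resulting JSVariableObject. Illustrative JS (an
// assumption for exposition, not taken from this file):
//
//     function outer() {
//         var x = 42;
//         function inner() { return x; } // get_scoped_var with skip >= 1
//     }
//
// If this code block needs a full scope chain but may not have pushed its
// activation yet, the first hop is guarded: when the activation register is
// still empty, that hop is skipped.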
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT0);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);

    loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
    loadPtr(Address(regT0, currentInstruction[2].u.operand * sizeof(Register)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(currentInstruction[1].u.operand);
}

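// emit_op_put_scoped_var is the store-side mirror of the above: the same
// guarded scope-chain walk, but the value is written into the variable
// object's register file, with a write barrier emitted first so the GC
// learns about the new reference.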
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int skip = currentInstruction[2].u.operand;

    emitGetVirtualRegister(currentInstruction[3].u.operand, regT0);

    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT1);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        Jump activationNotCreated;
        if (checkTopLevel)
            activationNotCreated = branchTestPtr(Zero, addressFor(m_codeBlock->activationRegister()));
        loadPtr(Address(regT1, JSScope::offsetOfNext()), regT1);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT1, JSScope::offsetOfNext()), regT1);

    emitWriteBarrier(regT1, regT0, regT2, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);

    loadPtr(Address(regT1, JSVariableObject::offsetOfRegisters()), regT1);
    storePtr(regT0, Address(regT1, currentInstruction[1].u.operand * sizeof(Register)));
}

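// init_global_const stores straight through a pointer baked into the
// bytecode (currentInstruction[1].u.registerPointer addresses the global
// object's storage), so no property lookup happens at runtime; only a write
// barrier on the global object is required.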
93a37866 1255void JIT::emit_op_init_global_const(Instruction* currentInstruction)
6fe7ccc8 1256{
93a37866
A
1257 JSGlobalObject* globalObject = m_codeBlock->globalObject();
1258
1259 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
1260
1261 store64(regT0, currentInstruction[1].u.registerPointer);
1262 if (Heap::isWriteBarrierEnabled())
1263 emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
6fe7ccc8
A
1264}
1265
93a37866 1266void JIT::emit_op_init_global_const_check(Instruction* currentInstruction)
6fe7ccc8 1267{
93a37866
A
1268 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
1269
1270 addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));
1271
6fe7ccc8
A
1272 JSGlobalObject* globalObject = m_codeBlock->globalObject();
1273
93a37866
A
1274 store64(regT0, currentInstruction[1].u.registerPointer);
1275 if (Heap::isWriteBarrierEnabled())
1276 emitWriteBarrier(globalObject, regT0, regT2, ShouldFilterImmediates, WriteBarrierForVariableAccess);
1277}
6fe7ccc8 1278
93a37866
A
1279void JIT::emitSlow_op_init_global_const_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
1280{
1281 linkSlowCase(iter);
1282
1283 JITStubCall stubCall(this, cti_op_init_global_const_check);
1284 stubCall.addArgument(regT0);
1285 stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
1286 stubCall.call();
6fe7ccc8
A
1287}
1288
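// The two reset routines below return a patched inline cache to its initial
// state: the cached call is relinked to the generic C++ stub, the inline
// structure pointer is overwritten with a sentinel (unusedPointer), the
// inline offset is zeroed, and, for gets, the structure-check jump is
// pointed back at the cold path.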
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(unusedPointer));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(unusedPointer));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel), 0);
}

#endif // USE(JSVALUE64)

void JIT::emitWriteBarrier(RegisterID owner, RegisterID value, RegisterID scratch, RegisterID scratch2, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(scratch2);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
    ASSERT(owner != scratch);
    ASSERT(owner != scratch2);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif
}

void JIT::emitWriteBarrier(JSCell* owner, RegisterID value, RegisterID scratch, WriteBarrierMode mode, WriteBarrierUseKind useKind)
{
    UNUSED_PARAM(owner);
    UNUSED_PARAM(scratch);
    UNUSED_PARAM(useKind);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);

#if ENABLE(WRITE_BARRIER_PROFILING)
    emitCount(WriteBarrierCounters::jitCounterFor(useKind));
#endif
}

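// addStructureTransitionCheck has two modes, visible in the code below: when
// the structure can still be watchpointed, no check is emitted at all -
// a watchpoint is registered so the stub is discarded if the structure ever
// transitions (the !ASSERT_DISABLED block merely double-checks the
// assumption at runtime). Otherwise an explicit structure-compare branch is
// emitted and returned as the failure case.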
JIT::Jump JIT::addStructureTransitionCheck(JSCell* object, Structure* structure, StructureStubInfo* stubInfo, RegisterID scratch)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo->addWatchpoint(m_codeBlock));
#if !ASSERT_DISABLED
        move(TrustedImmPtr(object), scratch);
        Jump ok = branchPtr(Equal, Address(scratch, JSCell::structureOffset()), TrustedImmPtr(structure));
        breakpoint();
        ok.link(this);
#endif
        Jump result; // Returning an unset jump this way because otherwise VC++ would complain.
        return result;
    }

    move(TrustedImmPtr(object), scratch);
    return branchPtr(NotEqual, Address(scratch, JSCell::structureOffset()), TrustedImmPtr(structure));
}

void JIT::addStructureTransitionCheck(JSCell* object, Structure* structure, StructureStubInfo* stubInfo, JumpList& failureCases, RegisterID scratch)
{
    Jump failureCase = addStructureTransitionCheck(object, structure, stubInfo, scratch);
    if (!failureCase.isSet())
        return;

    failureCases.append(failureCase);
}

void JIT::testPrototype(JSValue prototype, JumpList& failureCases, StructureStubInfo* stubInfo)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());
    addStructureTransitionCheck(prototype.asCell(), prototype.asCell()->structure(), stubInfo, failureCases, regT3);
}

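// isDirectPutById decides whether a reset put_by_id cache should fall back
// to the direct (own-property) stub or the ordinary one. Where the access
// type alone does not say, it inspects which C function the cache's call is
// currently linked to.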
bool JIT::isDirectPutById(StructureStubInfo* stubInfo)
{
    switch (stubInfo->accessType) {
    case access_put_by_id_transition_normal:
        return false;
    case access_put_by_id_transition_direct:
        return true;
    case access_put_by_id_replace:
    case access_put_by_id_generic: {
        void* oldCall = MacroAssembler::readCallTarget(stubInfo->callReturnLocation).executableAddress();
        if (oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_generic)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_direct_fail))
            return true;
        ASSERT(oldCall == bitwise_cast<void*>(cti_op_put_by_id)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_generic)
            || oldCall == bitwise_cast<void*>(cti_op_put_by_id_fail));
        return false;
    }
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return false;
    }
}

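// privateCompileGetByVal compiles a stub specialized for the one array shape
// this get_by_val site has been observed to use: a JSArray indexing mode
// (Int32/Double/Contiguous/ArrayStorage) or a typed array flavor. badType
// and the mode-specific slow cases are linked back to the generic slow path,
// and the hot path's patchable bad-type jump is retargeted at the new stub.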
void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    case JITInt8Array:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->int8ArrayDescriptor(), 1, SignedTypedArray);
        break;
    case JITInt16Array:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->int16ArrayDescriptor(), 2, SignedTypedArray);
        break;
    case JITInt32Array:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->int32ArrayDescriptor(), 4, SignedTypedArray);
        break;
    case JITUint8Array:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint8ArrayDescriptor(), 1, UnsignedTypedArray);
        break;
    case JITUint8ClampedArray:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray);
        break;
    case JITUint16Array:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint16ArrayDescriptor(), 2, UnsignedTypedArray);
        break;
    case JITUint32Array:
        slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, m_vm->uint32ArrayDescriptor(), 4, UnsignedTypedArray);
        break;
    case JITFloat32Array:
        slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, m_vm->float32ArrayDescriptor(), 4);
        break;
    case JITFloat64Array:
        slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, m_vm->float64ArrayDescriptor(), 8);
        break;
    default:
        CRASH();
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_val_generic));
}

void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    case JITInt8Array:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->int8ArrayDescriptor(), 1, SignedTypedArray, TruncateRounding);
        break;
    case JITInt16Array:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->int16ArrayDescriptor(), 2, SignedTypedArray, TruncateRounding);
        break;
    case JITInt32Array:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->int32ArrayDescriptor(), 4, SignedTypedArray, TruncateRounding);
        break;
    case JITUint8Array:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint8ArrayDescriptor(), 1, UnsignedTypedArray, TruncateRounding);
        break;
    case JITUint8ClampedArray:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint8ClampedArrayDescriptor(), 1, UnsignedTypedArray, ClampRounding);
        break;
    case JITUint16Array:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint16ArrayDescriptor(), 2, UnsignedTypedArray, TruncateRounding);
        break;
    case JITUint32Array:
        slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, m_vm->uint32ArrayDescriptor(), 4, UnsignedTypedArray, TruncateRounding);
        break;
    case JITFloat32Array:
        slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, m_vm->float32ArrayDescriptor(), 4);
        break;
    case JITFloat64Array:
        slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, m_vm->float64ArrayDescriptor(), 8);
        break;
    default:
        CRASH();
        break;
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_put_by_val_generic));
}

JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor& descriptor, size_t elementSize, TypedArraySignedness signedness)
{
    // The best way to test the array type is to use the classInfo. We need to do so without
    // clobbering the register that holds the indexing type, base, and property.

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif

    JumpList slowCases;

    loadPtr(Address(base, JSCell::structureOffset()), scratch);
    badType = patchableBranchPtr(NotEqual, Address(scratch, Structure::classInfoOffset()), TrustedImmPtr(descriptor.m_classInfo));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, descriptor.m_lengthOffset)));
    loadPtr(Address(base, descriptor.m_storageOffset), base);

    switch (elementSize) {
    case 1:
        if (signedness == SignedTypedArray)
            load8Signed(BaseIndex(base, property, TimesOne), resultPayload);
        else
            load8(BaseIndex(base, property, TimesOne), resultPayload);
        break;
    case 2:
        if (signedness == SignedTypedArray)
            load16Signed(BaseIndex(base, property, TimesTwo), resultPayload);
        else
            load16(BaseIndex(base, property, TimesTwo), resultPayload);
        break;
    case 4:
        load32(BaseIndex(base, property, TimesFour), resultPayload);
        break;
    default:
        CRASH();
    }

    Jump done;
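    // A uint32 element with its sign bit set cannot be represented as an
    // int32 immediate, so that case is re-materialized as a double below:
    // the int32 conversion yields (value - 2^32), and adding twoToThe32
    // back recovers the true unsigned value before boxing.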
    if (elementSize == 4 && signedness == UnsignedTypedArray) {
        Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));

        convertInt32ToDouble(resultPayload, fpRegT0);
        addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
#if USE(JSVALUE64)
        moveDoubleTo64(fpRegT0, resultPayload);
        sub64(tagTypeNumberRegister, resultPayload);
#else
        moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif

        done = jump();
        canBeInt.link(this);
    }

#if USE(JSVALUE64)
    or64(tagTypeNumberRegister, resultPayload);
#else
    move(TrustedImm32(JSValue::Int32Tag), resultTag);
#endif
    if (done.isSet())
        done.link(this);
    return slowCases;
}

JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, const TypedArrayDescriptor& descriptor, size_t elementSize)
{
#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif

    JumpList slowCases;

    loadPtr(Address(base, JSCell::structureOffset()), scratch);
    badType = patchableBranchPtr(NotEqual, Address(scratch, Structure::classInfoOffset()), TrustedImmPtr(descriptor.m_classInfo));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, descriptor.m_lengthOffset)));
    loadPtr(Address(base, descriptor.m_storageOffset), base);

    switch (elementSize) {
    case 4:
        loadFloat(BaseIndex(base, property, TimesFour), fpRegT0);
        convertFloatToDouble(fpRegT0, fpRegT0);
        break;
    case 8:
        loadDouble(BaseIndex(base, property, TimesEight), fpRegT0);
        break;
    default:
        CRASH();
    }

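    // NaN purification: an arbitrary NaN bit pattern read out of the heap
    // could alias the boxed-value encoding used for JSValues, so any NaN is
    // replaced with the canonical quiet NaN before the result escapes.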
    Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
    static const double NaN = QNaN;
    loadDouble(&NaN, fpRegT0);
    notNaN.link(this);

#if USE(JSVALUE64)
    moveDoubleTo64(fpRegT0, resultPayload);
    sub64(tagTypeNumberRegister, resultPayload);
#else
    moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
    return slowCases;
}

JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, const TypedArrayDescriptor& descriptor, size_t elementSize, TypedArraySignedness signedness, TypedArrayRounding rounding)
{
    unsigned value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
#endif

    JumpList slowCases;

    loadPtr(Address(base, JSCell::structureOffset()), earlyScratch);
    badType = patchableBranchPtr(NotEqual, Address(earlyScratch, Structure::classInfoOffset()), TrustedImmPtr(descriptor.m_classInfo));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, descriptor.m_lengthOffset)));

#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    slowCases.append(emitJumpIfNotImmediateInteger(earlyScratch));
#else
    emitLoad(value, lateScratch, earlyScratch);
    slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
#endif

    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, descriptor.m_storageOffset), lateScratch);

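    // Uint8ClampedArray clamps out-of-range stores to [0, 255] instead of
    // truncating: in-range values fall straight through, values above 0xff
    // are replaced with 0xff, and negative values are zeroed.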
    if (rounding == ClampRounding) {
        ASSERT(elementSize == 1);
        ASSERT_UNUSED(signedness, signedness == UnsignedTypedArray);
        Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
        Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
        xor32(earlyScratch, earlyScratch);
        Jump clamped = jump();
        tooBig.link(this);
        move(TrustedImm32(0xff), earlyScratch);
        clamped.link(this);
        inBounds.link(this);
    }

    switch (elementSize) {
    case 1:
        store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
        break;
    case 2:
        store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
        break;
    case 4:
        store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
        break;
    default:
        CRASH();
    }

    return slowCases;
}

JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, const TypedArrayDescriptor& descriptor, size_t elementSize)
{
    unsigned value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
#endif

    JumpList slowCases;

    loadPtr(Address(base, JSCell::structureOffset()), earlyScratch);
    badType = patchableBranchPtr(NotEqual, Address(earlyScratch, Structure::classInfoOffset()), TrustedImmPtr(descriptor.m_classInfo));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, descriptor.m_lengthOffset)));

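    // The incoming value may be an immediate int32 or a boxed double; both
    // branches below leave the number in fpRegT0, ready to be narrowed
    // and/or stored.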
#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    Jump doubleCase = emitJumpIfNotImmediateInteger(earlyScratch);
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(emitJumpIfNotImmediateNumber(earlyScratch));
    add64(tagTypeNumberRegister, earlyScratch);
    move64ToDouble(earlyScratch, fpRegT0);
    ready.link(this);
#else
    emitLoad(value, lateScratch, earlyScratch);
    Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
    moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
    ready.link(this);
#endif

    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, descriptor.m_storageOffset), lateScratch);

    switch (elementSize) {
    case 4:
        convertDoubleToFloat(fpRegT0, fpRegT0);
        storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
        break;
    case 8:
        storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
        break;
    default:
        CRASH();
    }

    return slowCases;
}

} // namespace JSC

#endif // ENABLE(JIT)