git.saurik.com Git - apple/javascriptcore.git/blame - jit/JITPropertyAccess.cpp (JavaScriptCore-7600.1.4.15.12)
9dae56ea 1/*
81345200 2 * Copyright (C) 2008, 2009, 2014 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
ba379fdc 27
4e4e5a6f 28#if ENABLE(JIT)
14957cd0 29#include "JIT.h"
ba379fdc 30
4e4e5a6f 31#include "CodeBlock.h"
93a37866 32#include "GCAwareJITStubRoutine.h"
4e4e5a6f 33#include "GetterSetter.h"
34#include "Interpreter.h"
35#include "JITInlines.h"
36#include "JSArray.h"
37#include "JSFunction.h"
38#include "JSPropertyNameIterator.h"
93a37866 39#include "JSVariableObject.h"
40#include "LinkBuffer.h"
41#include "RepatchBuffer.h"
42#include "ResultType.h"
43#include "SamplingTool.h"
93a37866 44#include <wtf/StringPrintStream.h>
ba379fdc 45
ba379fdc 46
4e4e5a6f 47namespace JSC {
14957cd0 48#if USE(JSVALUE64)
ba379fdc 49
93a37866 50JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
9dae56ea 51{
81345200 52 JSInterfaceJIT jit(vm);
4e4e5a6f 53 JumpList failures;
54 failures.append(JSC::branchStructure(jit,
55 NotEqual,
56 Address(regT0, JSCell::structureIDOffset()),
57 vm->stringStructure.get()));
ba379fdc 58
14957cd0 59 // Load string length to regT2, and start the process of loading the data pointer into regT0
60 jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
61 jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
62 failures.append(jit.branchTest32(Zero, regT0));
63
64 // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
65 failures.append(jit.branch32(AboveOrEqual, regT1, regT2));
ba379fdc 66
4e4e5a6f 67 // Load the character
68 JumpList is16Bit;
69 JumpList cont8Bit;
70 // Load the string flags
71 jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
72 jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
73 is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
74 jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
75 cont8Bit.append(jit.jump());
76 is16Bit.link(&jit);
4e4e5a6f 77 jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
78 cont8Bit.link(&jit);
79
14957cd0 80 failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
93a37866 81 jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
82 jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
83 jit.ret();
f9bf01c6 84
4e4e5a6f 85 failures.link(&jit);
14957cd0 86 jit.move(TrustedImm32(0), regT0);
4e4e5a6f 87 jit.ret();
f9bf01c6 88
81345200 89 LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
93a37866 90 return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
91}
92
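An aside on the stub above: WebKit strings store their payload as either 8-bit (Latin-1) or 16-bit (UTF-16) code units, and single-character results for code points below 0x100 come from a per-VM cache of preallocated strings rather than a fresh allocation. Below is a minimal standalone C++ sketch of the same lookup policy, using ordinary containers in place of JSC's StringImpl and SmallStrings; the names are illustrative, not JSC API, and nullopt marks where the stub falls back to the slow path.

#include <cstdint>
#include <optional>
#include <string>
#include <vector>

// Simplified stand-in for an 8-bit/16-bit string payload (StringImpl).
struct SimpleString {
    bool is8Bit;
    std::vector<uint8_t> chars8;    // used when is8Bit
    std::vector<uint16_t> chars16;  // used otherwise
    size_t length() const { return is8Bit ? chars8.size() : chars16.size(); }
};

// Stand-in for the per-VM single-character string cache (SmallStrings):
// one preallocated entry per code point below 0x100.
struct SingleCharCache {
    std::string cached[0x100];
    SingleCharCache() {
        for (unsigned i = 0; i < 0x100; ++i)
            cached[i] = std::string(1, static_cast<char>(i));
    }
};

// Mirrors the stub's fast path: bounds check, width-dependent load, then a
// bail-out for characters >= 0x100, which the stub also punts to the slow path
// because they are not in the cache.
std::optional<std::string> stringGetByIndex(const SimpleString& s, uint32_t index,
                                            const SingleCharCache& cache)
{
    if (index >= s.length())
        return std::nullopt;                      // out of bounds -> slow path
    uint32_t c = s.is8Bit ? s.chars8[index] : s.chars16[index];
    if (c >= 0x100)
        return std::nullopt;                      // not cached -> slow path
    return cache.cached[c];                       // cached single-character string
}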
93void JIT::emit_op_get_by_val(Instruction* currentInstruction)
94{
95 int dst = currentInstruction[1].u.operand;
96 int base = currentInstruction[2].u.operand;
97 int property = currentInstruction[3].u.operand;
98 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
99
f9bf01c6 100 emitGetVirtualRegisters(base, regT0, property, regT1);
ba379fdc 101 emitJumpSlowCaseIfNotImmediateInteger(regT1);
14957cd0 102
ba379fdc 103 // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
 104 // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
 105 // the number was signed, since m_vectorLength is always less than intmax (since the total allocation
 106 // size is always less than 4GB). As such zero extending will have been correct (and extending the value
 107 // to 64-bits is necessary since it's used in the address calculation). We zero extend rather than sign
 108 // extending since it makes it easier to re-tag the value in the slow case.
109 zeroExtend32ToPtr(regT1, regT1);
14957cd0 110
f9bf01c6 111 emitJumpSlowCaseIfNotJSCell(regT0, base);
81345200 112 emitArrayProfilingSiteWithCell(regT0, regT2, profile);
113 and32(TrustedImm32(IndexingShapeMask), regT2);
114
115 PatchableJump badType;
116 JumpList slowCases;
117
118 JITArrayMode mode = chooseArrayMode(profile);
119 switch (mode) {
120 case JITInt32:
121 slowCases = emitInt32GetByVal(currentInstruction, badType);
122 break;
123 case JITDouble:
124 slowCases = emitDoubleGetByVal(currentInstruction, badType);
125 break;
126 case JITContiguous:
127 slowCases = emitContiguousGetByVal(currentInstruction, badType);
128 break;
129 case JITArrayStorage:
130 slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
131 break;
132 default:
133 CRASH();
134 break;
135 }
136
137 addSlowCase(badType);
138 addSlowCase(slowCases);
139
140 Label done = label();
141
142 if (!ASSERT_DISABLED) {
143 Jump resultOK = branchTest64(NonZero, regT0);
144 abortWithReason(JITGetByValResultIsNotEmpty);
145 resultOK.link(this);
146 }
f9bf01c6 147
6fe7ccc8 148 emitValueProfilingSite();
f9bf01c6 149 emitPutVirtualRegister(dst);
150
151 m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
152}
153
154JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
155{
156 JumpList slowCases;
157
158 badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
159 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
160 slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
161 loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
162 slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
163 moveDoubleTo64(fpRegT0, regT0);
164 sub64(tagTypeNumberRegister, regT0);
165
166 return slowCases;
167}
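The moveDoubleTo64/sub64 pair above is the JSVALUE64 boxing step: a double is encoded by adding 2^48 to its raw bit pattern, which the JIT implements as subtracting the TagTypeNumber constant kept in tagTypeNumberRegister (subtracting 0xffff000000000000 is the same, modulo 2^64, as adding 0x0001000000000000). The sketch below models that arithmetic in standalone C++, assuming the standard JSVALUE64 constants of this era; it illustrates the encoding and is not JSC code.

#include <cstdint>
#include <cstring>

// JSVALUE64-style constants (assumed; see JSCJSValue.h for the real scheme).
static const uint64_t TagTypeNumber      = 0xffff000000000000ull;
static const uint64_t DoubleEncodeOffset = 0x0001000000000000ull; // 2^48

uint64_t boxDouble(double d)
{
    uint64_t bits;
    std::memcpy(&bits, &d, sizeof bits);
    // The JIT emits sub64(tagTypeNumberRegister, reg); subtracting
    // TagTypeNumber wraps around to the same result as adding 2^48.
    return bits - TagTypeNumber;        // == bits + DoubleEncodeOffset (mod 2^64)
}

double unboxDouble(uint64_t value)
{
    // The JIT emits add64(tagTypeNumberRegister, reg) before move64ToDouble.
    uint64_t bits = value + TagTypeNumber;
    double d;
    std::memcpy(&d, &bits, sizeof d);
    return d;                           // unboxDouble(boxDouble(x)) == x
}

uint64_t boxInt32(int32_t i)
{
    // Integers are tagged by OR-ing in TagTypeNumber, matching the
    // or64(tagTypeNumberRegister, ...) in the typed-array path later on.
    return TagTypeNumber | static_cast<uint32_t>(i);
}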
168
169JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
170{
171 JumpList slowCases;
172
173 badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
174 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
175 slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
176 load64(BaseIndex(regT2, regT1, TimesEight), regT0);
177 slowCases.append(branchTest64(Zero, regT0));
178
179 return slowCases;
180}
181
182JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
183{
184 JumpList slowCases;
185
186 add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
187 badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));
188
189 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
190 slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));
191
192 load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
193 slowCases.append(branchTest64(Zero, regT0));
194
195 return slowCases;
196}
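The helpers above differ mainly in how the backing store is laid out and how a missing element is detected: the contiguous shapes bounds-check the index against the butterfly's public length and treat a zero encoded value as a hole, while ArrayStorage bounds-checks against the vector length and keeps its elements behind a header. A rough standalone model of the two read paths, using plain C++ vectors in place of the butterfly (illustrative only; nullopt marks where the JIT would branch to a slow case):

#include <cstdint>
#include <optional>
#include <vector>

using EncodedValue = uint64_t;                 // 0 doubles as the "hole" marker

// Contiguous shape: elements indexed 0..publicLength-1, holes encoded as 0.
struct ContiguousModel {
    uint32_t publicLength = 0;                 // what array.length reports
    std::vector<EncodedValue> slots;           // slots.size() plays the vector length

    std::optional<EncodedValue> get(uint32_t i) const {
        if (i >= publicLength)                 // branch32(AboveOrEqual, ..., publicLength)
            return std::nullopt;               // out of bounds -> slow path
        EncodedValue v = slots[i];
        if (!v)                                // branchTest64(Zero, ...): hole
            return std::nullopt;
        return v;
    }
};

// ArrayStorage shape: same idea, but bounds-checked against the vector length,
// matching the vectorLengthOffset() check in emitArrayStorageGetByVal.
struct ArrayStorageModel {
    std::vector<EncodedValue> vector;          // m_vector, holes encoded as 0

    std::optional<EncodedValue> get(uint32_t i) const {
        if (i >= vector.size())
            return std::nullopt;
        EncodedValue v = vector[i];
        return v ? std::optional<EncodedValue>(v) : std::nullopt;
    }
};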
197
198void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
199{
200 int dst = currentInstruction[1].u.operand;
201 int base = currentInstruction[2].u.operand;
202 int property = currentInstruction[3].u.operand;
93a37866 203 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
204
205 linkSlowCase(iter); // property int32 check
206 linkSlowCaseIfNotJSCell(iter, base); // base cell check
207 Jump nonCell = jump();
208 linkSlowCase(iter); // base array check
209 Jump notString = branchStructure(NotEqual,
210 Address(regT0, JSCell::structureIDOffset()),
211 m_vm->stringStructure.get());
212 emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
213 Jump failed = branchTest64(Zero, regT0);
214 emitPutVirtualRegister(dst, regT0);
215 emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
216 failed.link(this);
217 notString.link(this);
218 nonCell.link(this);
219
220 Jump skipProfiling = jump();
221
222 linkSlowCase(iter); // vector length check
223 linkSlowCase(iter); // empty value
224
225 emitArrayProfileOutOfBoundsSpecialCase(profile);
226
227 skipProfiling.link(this);
228
229 Label slowPath = label();
230
231 emitGetVirtualRegister(base, regT0);
232 emitGetVirtualRegister(property, regT1);
233 Call call = callOperation(operationGetByValDefault, dst, regT0, regT1);
234
235 m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
236 m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
237 m_byValInstructionIndex++;
238
239 emitValueProfilingSite();
240}
241
93a37866 242void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode finalObjectMode)
f9bf01c6 243{
244 ASSERT(sizeof(JSValue) == 8);
245
246 if (finalObjectMode == MayBeFinal) {
247 Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
248 loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
249 neg32(offset);
250 Jump done = jump();
251 isInline.link(this);
252 addPtr(TrustedImm32(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base, scratch);
253 done.link(this);
254 } else {
255 if (!ASSERT_DISABLED) {
256 Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
257 abortWithReason(JITOffsetIsNotOutOfLine);
258 isOutOfLine.link(this);
259 }
260 loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
261 neg32(offset);
262 }
263 signExtend32ToPtr(offset, offset);
264 load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
265}
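compileGetDirectOffset encodes the split between the two places a named property can live: the first few slots are allocated inline in the object cell itself, and anything at or past firstOutOfLineOffset spills into the separately allocated butterfly, which the code above reaches through butterflyOffset() and a negated index. The sketch below is a simplified C++ model of that split; the capacity constant is illustrative, and the real butterfly addresses its out-of-line slots at negative offsets from the butterfly pointer rather than through a std::vector.

#include <cstdint>
#include <vector>

using EncodedValue = uint64_t;

static const unsigned kInlineCapacity       = 6;               // illustrative
static const unsigned kFirstOutOfLineOffset = kInlineCapacity; // PropertyOffset boundary

struct ObjectModel {
    EncodedValue inlineStorage[kInlineCapacity] = {};  // lives inside the cell
    std::vector<EncodedValue> outOfLine;               // stand-in for the butterfly

    // Mirrors compileGetDirectOffset(base, result, offset, ...): use inline
    // storage when offset < firstOutOfLineOffset, otherwise index the butterfly.
    EncodedValue getDirect(unsigned propertyOffset) const {
        if (propertyOffset < kFirstOutOfLineOffset)
            return inlineStorage[propertyOffset];
        return outOfLine[propertyOffset - kFirstOutOfLineOffset];
    }

    // Mirrors compilePutDirectOffset further down, minus the write barrier.
    void putDirect(unsigned propertyOffset, EncodedValue v) {
        if (propertyOffset < kFirstOutOfLineOffset) {
            inlineStorage[propertyOffset] = v;
            return;
        }
        unsigned index = propertyOffset - kFirstOutOfLineOffset;
        if (index >= outOfLine.size())
            outOfLine.resize(index + 1);
        outOfLine[index] = v;
    }
};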
266
267void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
268{
269 int dst = currentInstruction[1].u.operand;
270 int base = currentInstruction[2].u.operand;
271 int property = currentInstruction[3].u.operand;
f9bf01c6 272 unsigned expected = currentInstruction[4].u.operand;
273 int iter = currentInstruction[5].u.operand;
274 int i = currentInstruction[6].u.operand;
275
276 emitGetVirtualRegister(property, regT0);
93a37866 277 addSlowCase(branch64(NotEqual, regT0, addressFor(expected)));
278 emitGetVirtualRegisters(base, regT0, iter, regT1);
279 emitJumpSlowCaseIfNotJSCell(regT0, base);
280
281 // Test base's structure
81345200 282 emitLoadStructure(regT0, regT2, regT3);
283 addSlowCase(branchPtr(NotEqual, regT2, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
284 load32(addressFor(i), regT3);
14957cd0 285 sub32(TrustedImm32(1), regT3);
f9bf01c6 286 addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
287 Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
288 add32(TrustedImm32(firstOutOfLineOffset), regT3);
289 sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
290 inlineProperty.link(this);
14957cd0 291 compileGetDirectOffset(regT0, regT0, regT3, regT1);
292
293 emitPutVirtualRegister(dst, regT0);
294}
295
296void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
297{
298 int dst = currentInstruction[1].u.operand;
299 int base = currentInstruction[2].u.operand;
300 int property = currentInstruction[3].u.operand;
301
302 linkSlowCase(iter);
303 linkSlowCaseIfNotJSCell(iter, base);
304 linkSlowCase(iter);
305 linkSlowCase(iter);
306
307 emitGetVirtualRegister(base, regT0);
308 emitGetVirtualRegister(property, regT1);
309 callOperation(operationGetByValGeneric, dst, regT0, regT1);
310}
311
312void JIT::emit_op_put_by_val(Instruction* currentInstruction)
313{
314 int base = currentInstruction[1].u.operand;
315 int property = currentInstruction[2].u.operand;
93a37866 316 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
317
318 emitGetVirtualRegisters(base, regT0, property, regT1);
ba379fdc 319 emitJumpSlowCaseIfNotImmediateInteger(regT1);
320 // See comment in op_get_by_val.
321 zeroExtend32ToPtr(regT1, regT1);
f9bf01c6 322 emitJumpSlowCaseIfNotJSCell(regT0, base);
81345200 323 emitArrayProfilingSiteWithCell(regT0, regT2, profile);
324 and32(TrustedImm32(IndexingShapeMask), regT2);
325
326 PatchableJump badType;
327 JumpList slowCases;
328
329 JITArrayMode mode = chooseArrayMode(profile);
330 switch (mode) {
331 case JITInt32:
332 slowCases = emitInt32PutByVal(currentInstruction, badType);
333 break;
334 case JITDouble:
335 slowCases = emitDoublePutByVal(currentInstruction, badType);
336 break;
337 case JITContiguous:
338 slowCases = emitContiguousPutByVal(currentInstruction, badType);
339 break;
340 case JITArrayStorage:
341 slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
342 break;
343 default:
344 CRASH();
345 break;
346 }
347
348 addSlowCase(badType);
349 addSlowCase(slowCases);
350
351 Label done = label();
352
353 m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
ba379fdc 354
355}
356
357JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
358{
81345200 359 int value = currentInstruction[3].u.operand;
360 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
361
362 JumpList slowCases;
363
364 badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));
365
366 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
367 Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));
368
369 Label storeResult = label();
370 emitGetVirtualRegister(value, regT3);
371 switch (indexingShape) {
372 case Int32Shape:
373 slowCases.append(emitJumpIfNotImmediateInteger(regT3));
374 store64(regT3, BaseIndex(regT2, regT1, TimesEight));
375 break;
376 case DoubleShape: {
377 Jump notInt = emitJumpIfNotImmediateInteger(regT3);
378 convertInt32ToDouble(regT3, fpRegT0);
379 Jump ready = jump();
380 notInt.link(this);
381 add64(tagTypeNumberRegister, regT3);
382 move64ToDouble(regT3, fpRegT0);
383 slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
384 ready.link(this);
385 storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
386 break;
387 }
388 case ContiguousShape:
389 store64(regT3, BaseIndex(regT2, regT1, TimesEight));
81345200 390 emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
391 break;
392 default:
393 CRASH();
394 break;
395 }
396
397 Jump done = jump();
398 outOfBounds.link(this);
399
400 slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));
401
402 emitArrayProfileStoreToHoleSpecialCase(profile);
403
404 add32(TrustedImm32(1), regT1, regT3);
405 store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
406 jump().linkTo(storeResult, this);
407
408 done.link(this);
409
410 return slowCases;
411}
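The out-of-bounds path above is the one subtle part of emitGenericContiguousPutByVal: a store just past the public length but still inside the allocated vector is treated as an append, so the public length is bumped to index + 1 and the store is retried at storeResult, while anything at or past the vector length goes to the slow path (which can reallocate). A small sketch of that policy over a plain vector model (illustrative, not JSC's Butterfly):

#include <cstdint>
#include <vector>

using EncodedValue = uint64_t;

struct ContiguousStoreModel {
    uint32_t publicLength = 0;                  // Butterfly::offsetOfPublicLength()
    std::vector<EncodedValue> slots;            // its size plays offsetOfVectorLength()

    // Returns false where the baseline JIT would branch to the slow path.
    bool put(uint32_t index, EncodedValue value) {
        if (index >= publicLength) {
            if (index >= slots.size())          // beyond vector length -> slow path
                return false;
            publicLength = index + 1;           // store-to-hole: grow length, retry
        }
        slots[index] = value;                   // the storeResult label
        return true;
    }
};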
412
413JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
414{
81345200 415 int value = currentInstruction[3].u.operand;
416 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
417
418 JumpList slowCases;
419
420 badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
421 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
422 slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));
423
424 Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
ba379fdc 425
f9bf01c6 426 Label storeResult(this);
6fe7ccc8 427 emitGetVirtualRegister(value, regT3);
93a37866 428 store64(regT3, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
81345200 429 emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
430 Jump end = jump();
431
432 empty.link(this);
433 emitArrayProfileStoreToHoleSpecialCase(profile);
434 add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
435 branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);
f9bf01c6 436
6fe7ccc8 437 add32(TrustedImm32(1), regT1);
93a37866 438 store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
6fe7ccc8 439 sub32(TrustedImm32(1), regT1);
440 jump().linkTo(storeResult, this);
441
442 end.link(this);
443
444 return slowCases;
445}
446
6fe7ccc8 447void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
ba379fdc 448{
449 int base = currentInstruction[1].u.operand;
450 int property = currentInstruction[2].u.operand;
451 int value = currentInstruction[3].u.operand;
93a37866 452 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
453
454 linkSlowCase(iter); // property int32 check
455 linkSlowCaseIfNotJSCell(iter, base); // base cell check
456 linkSlowCase(iter); // base not array check
457
458 JITArrayMode mode = chooseArrayMode(profile);
459 switch (mode) {
460 case JITInt32:
461 case JITDouble:
462 linkSlowCase(iter); // value type check
463 break;
464 default:
465 break;
466 }
467
468 Jump skipProfiling = jump();
469 linkSlowCase(iter); // out of bounds
470 emitArrayProfileOutOfBoundsSpecialCase(profile);
471 skipProfiling.link(this);
472
473 Label slowPath = label();
6fe7ccc8 474
475 emitGetVirtualRegister(property, regT1);
476 emitGetVirtualRegister(value, regT2);
477 bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
478 Call call = callOperation(isDirect ? operationDirectPutByVal : operationPutByVal, regT0, regT1, regT2);
479
480 m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
481 m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
482 m_byValInstructionIndex++;
483}
484
6fe7ccc8 485void JIT::emit_op_put_by_index(Instruction* currentInstruction)
ba379fdc 486{
487 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
488 emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
489 callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
490}
491
6fe7ccc8 492void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
ba379fdc 493{
494 emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
495 emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
496 emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
497 callOperation(operationPutGetterSetter, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), regT1, regT2);
498}
499
500void JIT::emit_op_del_by_id(Instruction* currentInstruction)
501{
502 int dst = currentInstruction[1].u.operand;
503 int base = currentInstruction[2].u.operand;
504 int property = currentInstruction[3].u.operand;
505 emitGetVirtualRegister(base, regT0);
506 callOperation(operationDeleteById, dst, regT0, &m_codeBlock->identifier(property));
507}
508
509void JIT::emit_op_get_by_id(Instruction* currentInstruction)
510{
511 int resultVReg = currentInstruction[1].u.operand;
512 int baseVReg = currentInstruction[2].u.operand;
513 const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
514
515 emitGetVirtualRegister(baseVReg, regT0);
81345200 516
ba379fdc 517 emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
93a37866 518
519 if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
520 emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT1, m_bytecodeOffset);
9dae56ea 521
522 JITGetByIdGenerator gen(
523 m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
524 JSValueRegs(regT0), JSValueRegs(regT0), DontSpill);
525 gen.generateFastPath(*this);
526 addSlowCase(gen.slowPathJump());
527 m_getByIds.append(gen);
9dae56ea 528
6fe7ccc8 529 emitValueProfilingSite();
81345200 530 emitPutVirtualRegister(resultVReg);
ba379fdc 531}
9dae56ea 532
81345200 533void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
9dae56ea 534{
535 int resultVReg = currentInstruction[1].u.operand;
536 int baseVReg = currentInstruction[2].u.operand;
537 const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));
538
539 linkSlowCaseIfNotJSCell(iter, baseVReg);
540 linkSlowCase(iter);
541
542 JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
543
544 Label coldPathBegin = label();
545
546 Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());
f9bf01c6 547
81345200 548 gen.reportSlowPathCall(coldPathBegin, call);
549}
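JITGetByIdGenerator emits the classic structure-check inline cache: the fast path compares the cell's structure against a patchable constant and loads from a patchable offset, and the cold path above calls operationGetByIdOptimize, which can repatch both once a concrete structure has been seen. The following is a minimal software model of such a cache; the field names are hypothetical stand-ins for the constants the JIT patches into the instruction stream, not JSC's generated code.

#include <cstdint>
#include <functional>

using EncodedValue = uint64_t;

struct Structure;                               // opaque shape identifier

struct CellModel {
    const Structure* structure;
    const EncodedValue* slots;                  // property storage
};

// One monomorphic cache site.
struct GetByIdCache {
    const Structure* cachedStructure = nullptr; // plays the patched structure check
    unsigned cachedOffset = 0;                  // plays the patched load offset

    EncodedValue get(const CellModel& cell,
                     const std::function<EncodedValue(const CellModel&, GetByIdCache&)>& slowPath)
    {
        if (cell.structure == cachedStructure)  // fast path: one compare + one load
            return cell.slots[cachedOffset];
        // Cold path: full lookup, then (re)fill the cache for next time,
        // mirroring what the optimize operation does via RepatchBuffer.
        return slowPath(cell, *this);
    }
};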
550
ba379fdc 551void JIT::emit_op_put_by_id(Instruction* currentInstruction)
9dae56ea 552{
553 int baseVReg = currentInstruction[1].u.operand;
554 int valueVReg = currentInstruction[3].u.operand;
555 unsigned direct = currentInstruction[8].u.operand;
556
557 emitWriteBarrier(baseVReg, valueVReg, ShouldFilterBase);
ba379fdc 558
 559 // In order to be able to patch both the Structure and the object offset, we store one pointer,
 560 // 'hotPathBegin', to just after the arguments have been loaded into registers, and we generate code
 561 // such that the Structure & offset are always at the same distance from it.
562
ba379fdc 563 emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);
564
565 // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
ba379fdc 566 emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);
9dae56ea 567
568 JITPutByIdGenerator gen(
569 m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
570 JSValueRegs(regT0), JSValueRegs(regT1), regT2, DontSpill, m_codeBlock->ecmaMode(),
571 direct ? Direct : NotDirect);
572
573 gen.generateFastPath(*this);
574 addSlowCase(gen.slowPathJump());
575
576 m_putByIds.append(gen);
577}
578
ba379fdc 579void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
9dae56ea 580{
581 int baseVReg = currentInstruction[1].u.operand;
582 const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
ba379fdc 583
584 linkSlowCaseIfNotJSCell(iter, baseVReg);
585 linkSlowCase(iter);
586
587 Label coldPathBegin(this);
588
589 JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];
9dae56ea 590
591 Call call = callOperation(
592 gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());
593
594 gen.reportSlowPathCall(coldPathBegin, call);
595}
596
597// Compile a store into an object's property storage. May overwrite the
598// value in objectReg.
93a37866 599void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset)
ba379fdc 600{
601 if (isInlineOffset(cachedOffset)) {
602 store64(value, Address(base, JSObject::offsetOfInlineStorage() + sizeof(JSValue) * offsetInInlineStorage(cachedOffset)));
603 return;
604 }
605
606 loadPtr(Address(base, JSObject::butterflyOffset()), base);
607 store64(value, Address(base, sizeof(JSValue) * offsetInButterfly(cachedOffset)));
608}
609
610// Compile a load from an object's property storage. May overwrite base.
93a37866 611void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset)
9dae56ea 612{
613 if (isInlineOffset(cachedOffset)) {
614 load64(Address(base, JSObject::offsetOfInlineStorage() + sizeof(JSValue) * offsetInInlineStorage(cachedOffset)), result);
615 return;
616 }
617
618 loadPtr(Address(base, JSObject::butterflyOffset()), result);
619 load64(Address(result, sizeof(JSValue) * offsetInButterfly(cachedOffset)), result);
620}
621
93a37866 622void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset)
f9bf01c6 623{
624 if (isInlineOffset(cachedOffset)) {
625 load64(base->locationForOffset(cachedOffset), result);
626 return;
627 }
628
629 loadPtr(base->butterflyAddress(), result);
630 load64(Address(result, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>)), result);
631}
632
81345200 633void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
9dae56ea 634{
635 if (!needsVarInjectionChecks)
636 return;
637 addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
638}
9dae56ea 639
640void JIT::emitResolveClosure(int dst, bool needsVarInjectionChecks, unsigned depth)
641{
642 emitVarInjectionCheck(needsVarInjectionChecks);
643 emitGetVirtualRegister(JSStack::ScopeChain, regT0);
644 if (m_codeBlock->needsActivation()) {
645 emitGetVirtualRegister(m_codeBlock->activationRegister(), regT1);
646 Jump noActivation = branchTestPtr(Zero, regT1);
647 loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
648 noActivation.link(this);
9dae56ea 649 }
650 for (unsigned i = 0; i < depth; ++i)
651 loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
652 emitPutVirtualRegister(dst);
653}
9dae56ea 654
655void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
656{
657 int dst = currentInstruction[1].u.operand;
658 ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
659 unsigned depth = currentInstruction[4].u.operand;
660
661 switch (resolveType) {
662 case GlobalProperty:
663 case GlobalVar:
664 case GlobalPropertyWithVarInjectionChecks:
665 case GlobalVarWithVarInjectionChecks:
666 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
667 move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
668 emitPutVirtualRegister(dst);
669 break;
670 case ClosureVar:
671 case ClosureVarWithVarInjectionChecks:
672 emitResolveClosure(dst, needsVarInjectionChecks(resolveType), depth);
673 break;
674 case Dynamic:
675 addSlowCase(jump());
676 break;
ba379fdc 677 }
678}
679
81345200 680void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
9dae56ea 681{
682 int dst = currentInstruction[1].u.operand;
683 ResolveType resolveType = static_cast<ResolveType>(currentInstruction[3].u.operand);
ba379fdc 684
685 if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar)
686 return;
ba379fdc 687
688 linkSlowCase(iter);
 689 int32_t identifierIndex = currentInstruction[2].u.operand;
 690 callOperation(operationResolveScope, dst, identifierIndex);
691}
692
81345200 693void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
9dae56ea 694{
695 emitGetVirtualRegister(scope, regT0);
696 loadPtr(structureSlot, regT1);
697 addSlowCase(branchTestPtr(Zero, regT1));
698 load32(Address(regT1, Structure::structureIDOffset()), regT1);
699 addSlowCase(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1));
700}
701
81345200 702void JIT::emitGetGlobalProperty(uintptr_t* operandSlot)
9dae56ea 703{
704 load32(operandSlot, regT1);
705 compileGetDirectOffset(regT0, regT0, regT1, regT2, KnownNotFinal);
706}
707
81345200 708void JIT::emitGetGlobalVar(uintptr_t operand)
9dae56ea 709{
81345200 710 loadPtr(reinterpret_cast<void*>(operand), regT0);
711}
712
81345200 713void JIT::emitGetClosureVar(int scope, uintptr_t operand)
9dae56ea 714{
715 emitGetVirtualRegister(scope, regT0);
716 loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
717 loadPtr(Address(regT0, operand * sizeof(Register)), regT0);
718}
9dae56ea 719
720void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
721{
722 int dst = currentInstruction[1].u.operand;
723 int scope = currentInstruction[2].u.operand;
724 ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
725 Structure** structureSlot = currentInstruction[5].u.structure.slot();
726 uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);
727
728 switch (resolveType) {
729 case GlobalProperty:
730 case GlobalPropertyWithVarInjectionChecks:
731 emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
732 emitGetGlobalProperty(operandSlot);
733 break;
734 case GlobalVar:
735 case GlobalVarWithVarInjectionChecks:
736 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
737 emitGetGlobalVar(*operandSlot);
738 break;
739 case ClosureVar:
740 case ClosureVarWithVarInjectionChecks:
741 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
742 emitGetClosureVar(scope, *operandSlot);
743 break;
744 case Dynamic:
745 addSlowCase(jump());
746 break;
4e4e5a6f 747 }
748 emitPutVirtualRegister(dst);
749 emitValueProfilingSite();
750}
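emit_op_get_from_scope compiles one of three fast paths depending on the ResolveType baked into the bytecode: a structure-checked property load on the global object, a load from an absolute address for a global var, or a load out of the scope object's register array for a closure var. The sketch below renders that dispatch in plain C++; the enum and struct members are simplified stand-ins for the bytecode operands, and throwing stands in for branching to the slow case.

#include <cstdint>
#include <stdexcept>

using EncodedValue = uint64_t;

enum class ResolveKind { GlobalProperty, GlobalVar, ClosureVar, Dynamic };

struct GlobalObjectModel {
    const void* structure;                         // compared against the cached structure
    const EncodedValue* propertyStorage;           // simplified: one flat slot array
};

struct ScopeModel {
    const EncodedValue* registers;                 // JSVariableObject::offsetOfRegisters()
};

EncodedValue getFromScope(ResolveKind kind,
                          const GlobalObjectModel& global, const void* cachedStructure,
                          uintptr_t operand, const ScopeModel& scope)
{
    switch (kind) {
    case ResolveKind::GlobalProperty:
        if (global.structure != cachedStructure)   // structure check covers var injection
            throw std::runtime_error("slow path");
        return global.propertyStorage[operand];    // operand is a property offset
    case ResolveKind::GlobalVar:
        return *reinterpret_cast<const EncodedValue*>(operand); // operand is an address
    case ResolveKind::ClosureVar:
        return scope.registers[operand];           // operand is a register index
    case ResolveKind::Dynamic:
    default:
        throw std::runtime_error("slow path");
    }
}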
751
81345200 752void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
9dae56ea 753{
754 int dst = currentInstruction[1].u.operand;
755 ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
9dae56ea 756
757 if (resolveType == GlobalVar || resolveType == ClosureVar)
758 return;
9dae56ea 759
760 if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
761 linkSlowCase(iter);
762 linkSlowCase(iter);
763 callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
764}
9dae56ea 765
766void JIT::emitPutGlobalProperty(uintptr_t* operandSlot, int value)
767{
768 emitGetVirtualRegister(value, regT2);
4e4e5a6f 769
770 loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
771 loadPtr(operandSlot, regT1);
772 negPtr(regT1);
773 storePtr(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)));
9dae56ea
A
774}
775
81345200 776void JIT::emitNotifyWrite(RegisterID value, RegisterID scratch, VariableWatchpointSet* set)
9dae56ea 777{
778 if (!set || set->state() == IsInvalidated)
779 return;
4e4e5a6f 780
781 load8(set->addressOfState(), scratch);
782 Jump isDone = branch32(Equal, scratch, TrustedImm32(IsInvalidated));
783 addSlowCase(branch64(NotEqual, AbsoluteAddress(set->addressOfInferredValue()), value));
784 isDone.link(this);
9dae56ea 785}
9dae56ea 786
81345200 787void JIT::emitPutGlobalVar(uintptr_t operand, int value, VariableWatchpointSet* set)
9dae56ea 788{
789 emitGetVirtualRegister(value, regT0);
790 emitNotifyWrite(regT0, regT1, set);
791 storePtr(regT0, reinterpret_cast<void*>(operand));
792}
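emitNotifyWrite guards the inferred-value optimization for global variables: while the variable's watchpoint set is still valid, the fast path may only store a value equal to the inferred one; any other store must take the slow path, which notifies the set and invalidates code that relied on the inference. A tiny model of that check (state names mirror the ones used above, but the struct itself is illustrative):

#include <cstdint>

using EncodedValue = uint64_t;

enum class WatchpointState { ClearWatchpoint, IsWatched, IsInvalidated };

struct VariableWatchpointModel {
    WatchpointState state = WatchpointState::IsWatched;
    EncodedValue inferredValue = 0;
};

// Returns true when the fast path may store directly; false corresponds to the
// addSlowCase branch above, where the runtime fires the watchpoint first.
bool notifyWriteFastPath(const VariableWatchpointModel* set, EncodedValue value)
{
    if (!set || set->state == WatchpointState::IsInvalidated)
        return true;                       // nothing is watching; a plain store is fine
    return value == set->inferredValue;    // a matching store keeps the inference valid
}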
793
81345200 794void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value)
6fe7ccc8 795{
796 emitGetVirtualRegister(value, regT1);
797 emitGetVirtualRegister(scope, regT0);
6fe7ccc8 798 loadPtr(Address(regT0, JSVariableObject::offsetOfRegisters()), regT0);
81345200 799 storePtr(regT1, Address(regT0, operand * sizeof(Register)));
800}
801
81345200 802void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
6fe7ccc8 803{
804 int scope = currentInstruction[1].u.operand;
805 int value = currentInstruction[3].u.operand;
806 ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
807 Structure** structureSlot = currentInstruction[5].u.structure.slot();
808 uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);
809
810 switch (resolveType) {
811 case GlobalProperty:
812 case GlobalPropertyWithVarInjectionChecks:
813 emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
814 emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
815 emitPutGlobalProperty(operandSlot, value);
816 break;
817 case GlobalVar:
818 case GlobalVarWithVarInjectionChecks:
819 emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
820 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
821 emitPutGlobalVar(*operandSlot, value, currentInstruction[5].u.watchpointSet);
822 break;
823 case ClosureVar:
824 case ClosureVarWithVarInjectionChecks:
825 emitWriteBarrier(scope, value, ShouldFilterValue);
826 emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
827 emitPutClosureVar(scope, *operandSlot, value);
828 break;
829 case Dynamic:
830 addSlowCase(jump());
831 break;
6fe7ccc8 832 }
81345200 833}
6fe7ccc8 834
835void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
836{
837 ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
838 unsigned linkCount = 0;
839 if (resolveType != GlobalVar && resolveType != ClosureVar)
840 linkCount++;
841 if ((resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks)
842 && currentInstruction[5].u.watchpointSet->state() != IsInvalidated)
843 linkCount++;
844 if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
845 linkCount++;
846 if (!linkCount)
847 return;
848 while (linkCount--)
849 linkSlowCase(iter);
850 callOperation(operationPutToScope, currentInstruction);
851}
852
93a37866 853void JIT::emit_op_init_global_const(Instruction* currentInstruction)
6fe7ccc8 854{
93a37866 855 JSGlobalObject* globalObject = m_codeBlock->globalObject();
81345200 856 emitWriteBarrier(globalObject, currentInstruction[2].u.operand, ShouldFilterValue);
93a37866 857 emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
93a37866 858 store64(regT0, currentInstruction[1].u.registerPointer);
859}
860
81345200 861#endif // USE(JSVALUE64)
6fe7ccc8 862
863#if USE(JSVALUE64)
864void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
93a37866 865{
866#if ENABLE(GGC)
867 Jump valueNotCell;
868 if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
869 emitGetVirtualRegister(value, regT0);
870 valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
871 }
872
873 emitGetVirtualRegister(owner, regT0);
874 Jump ownerNotCell;
875 if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
876 ownerNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
93a37866 877
878 Jump ownerNotMarkedOrAlreadyRemembered = checkMarkByte(regT0);
879 callOperation(operationUnconditionalWriteBarrier, regT0);
880 ownerNotMarkedOrAlreadyRemembered.link(this);
6fe7ccc8 881
882 if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
883 ownerNotCell.link(this);
884 if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
885 valueNotCell.link(this);
886#else
887 UNUSED_PARAM(owner);
888 UNUSED_PARAM(value);
889 UNUSED_PARAM(mode);
890#endif
6fe7ccc8
A
891}
892
81345200 893void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
6fe7ccc8 894{
895#if ENABLE(GGC)
896 emitGetVirtualRegister(value, regT0);
897 Jump valueNotCell;
898 if (mode == ShouldFilterValue)
899 valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
9dae56ea 900
81345200 901 emitWriteBarrier(owner);
14957cd0 902
903 if (mode == ShouldFilterValue)
904 valueNotCell.link(this);
905#else
6fe7ccc8 906 UNUSED_PARAM(owner);
907 UNUSED_PARAM(value);
908 UNUSED_PARAM(mode);
6fe7ccc8 909#endif
910}
911
912#else // USE(JSVALUE64)
913
914void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
6fe7ccc8 915{
916#if ENABLE(GGC)
917 Jump valueNotCell;
918 if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
919 emitLoadTag(value, regT0);
920 valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
921 }
922
923 emitLoad(owner, regT0, regT1);
924 Jump ownerNotCell;
925 if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
926 ownerNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
927
928 Jump ownerNotMarkedOrAlreadyRemembered = checkMarkByte(regT1);
929 callOperation(operationUnconditionalWriteBarrier, regT1);
930 ownerNotMarkedOrAlreadyRemembered.link(this);
931
932 if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
933 ownerNotCell.link(this);
934 if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
935 valueNotCell.link(this);
936#else
6fe7ccc8 937 UNUSED_PARAM(owner);
938 UNUSED_PARAM(value);
939 UNUSED_PARAM(mode);
6fe7ccc8 940#endif
941}
942
81345200 943void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
93a37866 944{
945#if ENABLE(GGC)
946 Jump valueNotCell;
947 if (mode == ShouldFilterValue) {
948 emitLoadTag(value, regT0);
949 valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
93a37866 950 }
14957cd0 951
81345200 952 emitWriteBarrier(owner);
93a37866 953
954 if (mode == ShouldFilterValue)
955 valueNotCell.link(this);
956#else
957 UNUSED_PARAM(owner);
958 UNUSED_PARAM(value);
959 UNUSED_PARAM(mode);
960#endif
961}
962
963#endif // USE(JSVALUE64)
964
965void JIT::emitWriteBarrier(JSCell* owner)
6fe7ccc8 966{
967#if ENABLE(GGC)
968 if (!MarkedBlock::blockFor(owner)->isMarked(owner)) {
969 Jump ownerNotMarkedOrAlreadyRemembered = checkMarkByte(owner);
970 callOperation(operationUnconditionalWriteBarrier, owner);
971 ownerNotMarkedOrAlreadyRemembered.link(this);
972 } else
973 callOperation(operationUnconditionalWriteBarrier, owner);
974#else
975 UNUSED_PARAM(owner);
976#endif // ENABLE(GGC)
977}
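All of the emitWriteBarrier variants reduce to the same generational-GC invariant: when a possibly old (already marked) object is mutated, it must be added to a remembered set so the next eden collection rescans it; the checkMarkByte fast path merely skips the runtime call when the owner is not marked or is already remembered, and the non-cell filters skip it when there is nothing to remember. A schematic C++ rendering of that policy follows; the mark and remembered bits are a model, not JSC's MarkedBlock layout.

#include <vector>

struct CellHeader {
    bool isMarked = false;        // survived a previous collection, i.e. "old"
    bool isRemembered = false;    // already queued for re-scanning
};

struct RememberedSet {
    std::vector<CellHeader*> cells;

    // Slow path, corresponding to operationUnconditionalWriteBarrier.
    void add(CellHeader* owner) {
        if (!owner->isRemembered) {
            owner->isRemembered = true;
            cells.push_back(owner);
        }
    }
};

// Equivalent of the emitted barrier: skip the call unless the owner is marked
// and not yet remembered, which is what the checkMarkByte branch tests.
inline void writeBarrier(RememberedSet& rset, CellHeader* owner)
{
    if (owner->isMarked && !owner->isRemembered)
        rset.add(owner);
}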
978
979void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
980{
981 Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
982
983 PatchableJump badType;
984 JumpList slowCases;
985
986 switch (arrayMode) {
987 case JITInt32:
988 slowCases = emitInt32GetByVal(currentInstruction, badType);
989 break;
990 case JITDouble:
991 slowCases = emitDoubleGetByVal(currentInstruction, badType);
992 break;
993 case JITContiguous:
994 slowCases = emitContiguousGetByVal(currentInstruction, badType);
995 break;
996 case JITArrayStorage:
997 slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
998 break;
93a37866 999 default:
1000 TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
1001 if (isInt(type))
1002 slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, type);
1003 else
1004 slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, type);
1005 break;
1006 }
1007
1008 Jump done = jump();
1009
81345200 1010 LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
1011
1012 patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1013 patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1014
1015 patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1016
1017 byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
81345200 1018 m_codeBlock, patchBuffer,
1019 ("Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1020
1021 RepatchBuffer repatchBuffer(m_codeBlock);
1022 repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
81345200 1023 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationGetByValGeneric));
1024}
1025
1026void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
1027{
1028 Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;
1029
1030 PatchableJump badType;
1031 JumpList slowCases;
1032
1033#if ENABLE(GGC)
1034 bool needsLinkForWriteBarrier = false;
1035#endif
1036
1037 switch (arrayMode) {
1038 case JITInt32:
1039 slowCases = emitInt32PutByVal(currentInstruction, badType);
1040 break;
1041 case JITDouble:
1042 slowCases = emitDoublePutByVal(currentInstruction, badType);
1043 break;
1044 case JITContiguous:
1045 slowCases = emitContiguousPutByVal(currentInstruction, badType);
1046#if ENABLE(GGC)
1047 needsLinkForWriteBarrier = true;
1048#endif
93a37866
A
1049 break;
1050 case JITArrayStorage:
1051 slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
81345200
A
1052#if ENABLE(GGC)
1053 needsLinkForWriteBarrier = true;
1054#endif
1055 break;
1056 default:
1057 TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
1058 if (isInt(type))
1059 slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, type);
1060 else
1061 slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, type);
1062 break;
1063 }
1064
1065 Jump done = jump();
1066
81345200 1067 LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
1068 patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
1069 patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
93a37866 1070 patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
1071#if ENABLE(GGC)
1072 if (needsLinkForWriteBarrier) {
1073 ASSERT(m_calls.last().to == operationUnconditionalWriteBarrier);
1074 patchBuffer.link(m_calls.last().from, operationUnconditionalWriteBarrier);
1075 }
1076#endif
93a37866 1077
1078 bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
1079 if (!isDirect) {
1080 byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1081 m_codeBlock, patchBuffer,
1082 ("Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1083
1084 } else {
1085 byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
1086 m_codeBlock, patchBuffer,
1087 ("Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
1088 }
1089 RepatchBuffer repatchBuffer(m_codeBlock);
1090 repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
81345200 1091 repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
1092}
1093
81345200 1094JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
93a37866 1095{
1096 ASSERT(isInt(type));
1097
1098 // The best way to test the array type is to use the classInfo. We need to do so without
1099 // clobbering the register that holds the indexing type, base, and property.
1100
1101#if USE(JSVALUE64)
1102 RegisterID base = regT0;
1103 RegisterID property = regT1;
1104 RegisterID resultPayload = regT0;
1105 RegisterID scratch = regT3;
1106#else
1107 RegisterID base = regT0;
1108 RegisterID property = regT2;
1109 RegisterID resultPayload = regT0;
1110 RegisterID resultTag = regT1;
1111 RegisterID scratch = regT3;
1112#endif
1113
1114 JumpList slowCases;
1115
1116 load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1117 badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
1118 slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
1119 loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), base);
93a37866 1120
81345200 1121 switch (elementSize(type)) {
93a37866 1122 case 1:
81345200 1123 if (isSigned(type))
1124 load8Signed(BaseIndex(base, property, TimesOne), resultPayload);
1125 else
1126 load8(BaseIndex(base, property, TimesOne), resultPayload);
1127 break;
1128 case 2:
81345200 1129 if (isSigned(type))
1130 load16Signed(BaseIndex(base, property, TimesTwo), resultPayload);
1131 else
1132 load16(BaseIndex(base, property, TimesTwo), resultPayload);
1133 break;
1134 case 4:
1135 load32(BaseIndex(base, property, TimesFour), resultPayload);
1136 break;
1137 default:
1138 CRASH();
1139 }
1140
1141 Jump done;
81345200 1142 if (type == TypeUint32) {
1143 Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));
1144
1145 convertInt32ToDouble(resultPayload, fpRegT0);
1146 addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
1147#if USE(JSVALUE64)
1148 moveDoubleTo64(fpRegT0, resultPayload);
1149 sub64(tagTypeNumberRegister, resultPayload);
1150#else
1151 moveDoubleToInts(fpRegT0, resultPayload, resultTag);
1152#endif
1153
1154 done = jump();
1155 canBeInt.link(this);
1156 }
1157
1158#if USE(JSVALUE64)
1159 or64(tagTypeNumberRegister, resultPayload);
1160#else
1161 move(TrustedImm32(JSValue::Int32Tag), resultTag);
1162#endif
1163 if (done.isSet())
1164 done.link(this);
1165 return slowCases;
1166}
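The TypeUint32 special case above exists because a 32-bit element with its top bit set would read back as a negative int32; the stub repairs it by converting to double and adding 2^32, so the JavaScript-visible value stays in [0, 2^32). A standalone sketch of that conversion (the local constant plays the role of the twoToThe32 value the JIT loads from memory):

#include <cassert>
#include <cstdint>

// Converts a raw 32-bit typed-array element to the numeric value JavaScript
// should observe, mirroring the canBeInt / addDouble(&twoToThe32) split.
double uint32ElementToNumber(int32_t rawElement)
{
    static const double twoToThe32 = 4294967296.0; // 2^32
    if (rawElement >= 0)
        return rawElement;                 // fits in an int32: tag it directly
    return static_cast<double>(rawElement) + twoToThe32; // e.g. -1 -> 4294967295
}

int main()
{
    assert(uint32ElementToNumber(7) == 7.0);
    assert(uint32ElementToNumber(-1) == 4294967295.0);
    return 0;
}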
1167
81345200 1168JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
93a37866 1169{
1170 ASSERT(isFloat(type));
1171
1172#if USE(JSVALUE64)
1173 RegisterID base = regT0;
1174 RegisterID property = regT1;
1175 RegisterID resultPayload = regT0;
1176 RegisterID scratch = regT3;
1177#else
1178 RegisterID base = regT0;
1179 RegisterID property = regT2;
1180 RegisterID resultPayload = regT0;
1181 RegisterID resultTag = regT1;
1182 RegisterID scratch = regT3;
1183#endif
1184
1185 JumpList slowCases;
1186
1187 load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
1188 badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
1189 slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
1190 loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), base);
93a37866 1191
81345200 1192 switch (elementSize(type)) {
1193 case 4:
1194 loadFloat(BaseIndex(base, property, TimesFour), fpRegT0);
1195 convertFloatToDouble(fpRegT0, fpRegT0);
1196 break;
1197 case 8: {
1198 loadDouble(BaseIndex(base, property, TimesEight), fpRegT0);
1199 break;
1200 }
1201 default:
1202 CRASH();
1203 }
1204
1205 Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
1206 static const double NaN = PNaN;
1207 loadDouble(TrustedImmPtr(&NaN), fpRegT0);
1208 notNaN.link(this);
1209
1210#if USE(JSVALUE64)
1211 moveDoubleTo64(fpRegT0, resultPayload);
1212 sub64(tagTypeNumberRegister, resultPayload);
1213#else
1214 moveDoubleToInts(fpRegT0, resultPayload, resultTag);
1215#endif
1216 return slowCases;
1217}
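The branchDouble/PNaN sequence above is NaN purification: under NaN boxing, an arbitrary NaN bit pattern read out of a Float32Array or Float64Array could collide with the encoding of a tagged pointer or integer, so any NaN is replaced with the single canonical quiet NaN before the value is boxed. A short standalone illustration, assuming the canonical pattern is the usual 0x7ff8000000000000:

#include <cstdint>
#include <cstring>

// Canonical quiet NaN used for boxing; JSC calls this PNaN.
double pureNaN()
{
    const uint64_t bits = 0x7ff8000000000000ull;
    double d;
    std::memcpy(&d, &bits, sizeof d);
    return d;
}

// Mirrors the emitted sequence: keep the loaded value if it compares equal to
// itself (i.e. is not NaN), otherwise substitute the canonical NaN so the boxed
// result cannot be mistaken for a tagged pointer or integer encoding.
double purifyNaN(double loaded)
{
    if (loaded == loaded)      // branchDouble(DoubleEqual, fpRegT0, fpRegT0)
        return loaded;
    return pureNaN();
}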
1218
81345200 1219JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
93a37866 1220{
1221 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1222 ASSERT(isInt(type));
1223
1224 int value = currentInstruction[3].u.operand;
1225
1226#if USE(JSVALUE64)
1227 RegisterID base = regT0;
1228 RegisterID property = regT1;
1229 RegisterID earlyScratch = regT3;
1230 RegisterID lateScratch = regT2;
1231#else
1232 RegisterID base = regT0;
1233 RegisterID property = regT2;
1234 RegisterID earlyScratch = regT3;
1235 RegisterID lateScratch = regT1;
1236#endif
1237
1238 JumpList slowCases;
1239
1240 load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
1241 badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
1242 Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
1243 emitArrayProfileOutOfBoundsSpecialCase(profile);
1244 Jump done = jump();
1245 inBounds.link(this);
1246
1247#if USE(JSVALUE64)
1248 emitGetVirtualRegister(value, earlyScratch);
1249 slowCases.append(emitJumpIfNotImmediateInteger(earlyScratch));
1250#else
1251 emitLoad(value, lateScratch, earlyScratch);
1252 slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
1253#endif
1254
1255 // We would be loading this into base as in get_by_val, except that the slow
1256 // path expects the base to be unclobbered.
81345200 1257 loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);
93a37866 1258
1259 if (isClamped(type)) {
1260 ASSERT(elementSize(type) == 1);
1261 ASSERT(!isSigned(type));
1262 Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
1263 Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
1264 xor32(earlyScratch, earlyScratch);
1265 Jump clamped = jump();
1266 tooBig.link(this);
1267 move(TrustedImm32(0xff), earlyScratch);
1268 clamped.link(this);
1269 inBounds.link(this);
1270 }
1271
81345200 1272 switch (elementSize(type)) {
1273 case 1:
1274 store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
1275 break;
1276 case 2:
1277 store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
1278 break;
1279 case 4:
1280 store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
1281 break;
1282 default:
1283 CRASH();
1284 }
1285
1286 done.link(this);
1287
1288 return slowCases;
1289}
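The isClamped(type) block above implements the Uint8ClampedArray store rule for the integer fast path: values below zero clamp to 0 and values above 255 clamp to 255 (double-valued inputs, which also need rounding, are left to the slow path). The same logic in plain C++, matching the unsigned/signed branch pair emitted above:

#include <cassert>
#include <cstdint>

uint8_t clampToUint8(int32_t value)
{
    if (static_cast<uint32_t>(value) <= 0xff) // unsigned compare: 0..255 passes through
        return static_cast<uint8_t>(value);
    if (value > 0xff)                         // signed compare: too large clamps to 255
        return 0xff;
    return 0;                                 // remaining case is negative: clamp to 0
}

int main()
{
    assert(clampToUint8(-5) == 0);
    assert(clampToUint8(300) == 0xff);
    assert(clampToUint8(42) == 42);
    return 0;
}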
1290
81345200 1291JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
93a37866 1292{
1293 ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
1294 ASSERT(isFloat(type));
1295
1296 int value = currentInstruction[3].u.operand;
1297
1298#if USE(JSVALUE64)
1299 RegisterID base = regT0;
1300 RegisterID property = regT1;
1301 RegisterID earlyScratch = regT3;
1302 RegisterID lateScratch = regT2;
1303#else
1304 RegisterID base = regT0;
1305 RegisterID property = regT2;
1306 RegisterID earlyScratch = regT3;
1307 RegisterID lateScratch = regT1;
1308#endif
1309
1310 JumpList slowCases;
1311
1312 load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
1313 badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
1314 Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
1315 emitArrayProfileOutOfBoundsSpecialCase(profile);
1316 Jump done = jump();
1317 inBounds.link(this);
1318
1319#if USE(JSVALUE64)
1320 emitGetVirtualRegister(value, earlyScratch);
1321 Jump doubleCase = emitJumpIfNotImmediateInteger(earlyScratch);
1322 convertInt32ToDouble(earlyScratch, fpRegT0);
1323 Jump ready = jump();
1324 doubleCase.link(this);
1325 slowCases.append(emitJumpIfNotImmediateNumber(earlyScratch));
1326 add64(tagTypeNumberRegister, earlyScratch);
1327 move64ToDouble(earlyScratch, fpRegT0);
1328 ready.link(this);
1329#else
1330 emitLoad(value, lateScratch, earlyScratch);
1331 Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
1332 convertInt32ToDouble(earlyScratch, fpRegT0);
1333 Jump ready = jump();
1334 doubleCase.link(this);
1335 slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
1336 moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
1337 ready.link(this);
1338#endif
1339
1340 // We would be loading this into base as in get_by_val, except that the slow
1341 // path expects the base to be unclobbered.
81345200 1342 loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);
93a37866 1343
81345200 1344 switch (elementSize(type)) {
1345 case 4:
1346 convertDoubleToFloat(fpRegT0, fpRegT0);
1347 storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
1348 break;
1349 case 8:
1350 storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
1351 break;
1352 default:
1353 CRASH();
1354 }
1355
1356 done.link(this);
1357
1358 return slowCases;
1359}
1360
1361} // namespace JSC
1362
1363#endif // ENABLE(JIT)