// jit/JITPropertyAccess.cpp - JavaScriptCore-7601.1.46.3
/*
 * Copyright (C) 2008, 2009, 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#include "JIT.h"

#include "CodeBlock.h"
#include "DirectArguments.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include "ScopedArguments.h"
#include "ScopedArgumentsTable.h"
#include <wtf/StringPrintStream.h>


namespace JSC {
#if USE(JSVALUE64)

JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    failures.append(JSC::branchStructure(jit,
        NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        vm->stringStructure.get()));

    // Load string length to regT2, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
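    // (A negative index, reinterpreted as uint32, is at least 0x80000000 and so always >= the length.)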
    failures.append(jit.branch32(AboveOrEqual, regT1, regT2));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT2);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT2, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT1, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}
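// On the failure paths above the stub returns 0 in regT0 rather than a JSString*;
// emitSlow_op_get_by_val below tests regT0 for zero after calling this stub to decide
// whether to fall through to the fully generic slow path.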

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);

    // This is technically incorrect - we're zero-extending an int32. On the hot path this doesn't matter.
    // We check the value as if it were a uint32 against the m_vectorLength - which will always fail if
    // the number was negative, since m_vectorLength is always less than intmax (because the total allocation
    // size is always less than 4GB). As such, zero-extending will have been correct (and extending the value
    // to 64 bits is necessary since it's used in the address calculation). We zero-extend rather than sign-
    // extend since it makes it easier to re-tag the value in the slow case.
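    // Illustrative, not emitted code: with the usual JSVALUE64 encoding an int32 n is boxed as
    //     TagTypeNumber | static_cast<uint32_t>(n)
    // so after zero-extension the register holds exactly the boxed payload, and the slow case
    // can re-box it with a single or64(tagTypeNumberRegister, regT1).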
    zeroExtend32ToPtr(regT1, regT1);

    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    if (!ASSERT_DISABLED) {
        Jump resultOK = branchTest64(NonZero, regT0);
        abortWithReason(JITGetByValResultIsNotEmpty);
        resultOK.link(this);
    }

    emitValueProfilingSite();
    emitPutVirtualRegister(dst);

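    // Record the bytecode offset, the patchable type check, and the done label so that
    // privateCompileGetByVal can later repatch this site with a specialized stub.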
    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

JIT::JumpList JIT::emitDoubleLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(DoubleShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    loadDouble(BaseIndex(regT2, regT1, TimesEight), fpRegT0);
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));

    return slowCases;
}

JIT::JumpList JIT::emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(expectedShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength())));
    load64(BaseIndex(regT2, regT1, TimesEight), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    add32(TrustedImm32(-ArrayStorageShape), regT2, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    load64(BaseIndex(regT2, regT1, TimesEight, ArrayStorage::vectorOffset()), regT0);
    slowCases.append(branchTest64(Zero, regT0));

    return slowCases;
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchStructure(NotEqual,
        Address(regT0, JSCell::structureIDOffset()),
        m_vm->stringStructure.get());
    emitNakedCall(CodeLocationLabel(m_vm->getCTIStub(stringGetByValStubGenerator).code()));
    Jump failed = branchTest64(Zero, regT0);
    emitPutVirtualRegister(dst, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitGetVirtualRegister(base, regT0);
    emitGetVirtualRegister(property, regT1);
    Call call = callOperation(operationGetByValDefault, dst, regT0, regT1, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID offset, RegisterID scratch, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);

    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        addPtr(TrustedImm32(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base, scratch);
        done.link(this);
    } else {
        if (!ASSERT_DISABLED) {
            Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
            abortWithReason(JITOffsetIsNotOutOfLine);
            isOutOfLine.link(this);
        }
        loadPtr(Address(base, JSObject::butterflyOffset()), scratch);
        neg32(offset);
    }
    signExtend32ToPtr(offset, offset);
    load64(BaseIndex(scratch, offset, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), result);
}
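// For reference, the layout the code above relies on: inline slot i lives at
// base + offsetOfInlineStorage + i * sizeof(JSValue), while the k'th out-of-line property
// (k = offset - firstOutOfLineOffset) lives below the butterfly pointer at
// butterfly - (k + 2) * sizeof(JSValue). Biasing 'base' on the inline path and negating
// 'offset' on the out-of-line path lets both cases share the single BaseIndex load above.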

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitGetVirtualRegisters(base, regT0, property, regT1);
    emitJumpSlowCaseIfNotImmediateInteger(regT1);
    // See comment in op_get_by_val.
    zeroExtend32ToPtr(regT1, regT1);
    emitJumpSlowCaseIfNotJSCell(regT0, base);
    emitArrayProfilingSiteWithCell(regT0, regT2, profile);
    and32(TrustedImm32(IndexingShapeMask), regT2);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    Jump outOfBounds = branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitGetVirtualRegister(value, regT3);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(emitJumpIfNotImmediateInteger(regT3));
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        break;
    case DoubleShape: {
        Jump notInt = emitJumpIfNotImmediateInteger(regT3);
        convertInt32ToDouble(regT3, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        add64(tagTypeNumberRegister, regT3);
        move64ToDouble(regT3, fpRegT0);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT2, regT1, TimesEight));
        break;
    }
    case ContiguousShape:
        store64(regT3, BaseIndex(regT2, regT1, TimesEight));
        emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
        break;
    default:
        CRASH();
        break;
    }

    Jump done = jump();
    outOfBounds.link(this);

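    // Past the public length. If the index is also past the vector length the butterfly must
    // be grown, which is slow-path work; otherwise this is a store to a hole, so bump the
    // public length to index + 1 and retry the store.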
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT1, regT3);
    store32(regT3, Address(regT2, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    return slowCases;
}

JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT2, TrustedImm32(ArrayStorageShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    slowCases.append(branch32(AboveOrEqual, regT1, Address(regT2, ArrayStorage::vectorLengthOffset())));

    Jump empty = branchTest64(Zero, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

    Label storeResult(this);
    emitGetVirtualRegister(value, regT3);
    store64(regT3, BaseIndex(regT2, regT1, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
    emitWriteBarrier(currentInstruction[1].u.operand, value, ShouldFilterValue);
    Jump end = jump();

    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT2, ArrayStorage::numValuesInVectorOffset()));
    branch32(Below, regT1, Address(regT2, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

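    // index >= length: grow the length to index + 1, then restore regT1, since the store at
    // storeResult still needs the original index.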
    add32(TrustedImm32(1), regT1);
    store32(regT1, Address(regT2, ArrayStorage::lengthOffset()));
    sub32(TrustedImm32(1), regT1);
    jump().linkTo(storeResult, this);

    end.link(this);

    return slowCases;
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();

    emitGetVirtualRegister(property, regT1);
    emitGetVirtualRegister(value, regT2);
    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    Call call = callOperation(isDirect ? operationDirectPutByVal : operationPutByVal, regT0, regT1, regT2, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    callOperation(operationPutByIndex, regT0, currentInstruction[2].u.operand, regT1);
}

void JIT::emit_op_put_getter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    callOperation(operationPutGetterById, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), regT1);
}

void JIT::emit_op_put_setter_by_id(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    callOperation(operationPutSetterById, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), regT1);
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    emitGetVirtualRegister(currentInstruction[1].u.operand, regT0);
    emitGetVirtualRegister(currentInstruction[3].u.operand, regT1);
    emitGetVirtualRegister(currentInstruction[4].u.operand, regT2);
    callOperation(operationPutGetterSetter, regT0, &m_codeBlock->identifier(currentInstruction[2].u.operand), regT1, regT2);
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitGetVirtualRegister(base, regT0);
    callOperation(operationDeleteById, dst, regT0, &m_codeBlock->identifier(property));
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitGetVirtualRegister(baseVReg, regT0);

    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
        emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT1, m_bytecodeOffset);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT0), DontSpill);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitPutVirtualRegister(resultVReg);
    assertStackPointerOffset();
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    int baseVReg = currentInstruction[1].u.operand;
    int valueVReg = currentInstruction[3].u.operand;
    unsigned direct = currentInstruction[8].u.operand;

    emitWriteBarrier(baseVReg, valueVReg, ShouldFilterBase);

    // In order to be able to patch both the Structure and the object offset, we store one
    // pointer to just after the point where the arguments have been loaded into registers
    // ('hotPathBegin'), and we generate code such that the Structure and offset are always
    // at the same distance from it.

    emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);

    // Jump to a slow case if either the base object is an immediate, or if the Structure does not match.
    emitJumpSlowCaseIfNotJSCell(regT0, baseVReg);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        JSValueRegs(regT0), JSValueRegs(regT1), regT2, DontSpill, m_codeBlock->ecmaMode(),
        direct ? Direct : NotDirect);

    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());

    m_putByIds.append(gen);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int baseVReg = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    Label coldPathBegin(this);

    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];

    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT1, regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

// Compile a store into an object's property storage. May overwrite the
// value in base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID value, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        store64(value, Address(base, JSObject::offsetOfInlineStorage() + sizeof(JSValue) * offsetInInlineStorage(cachedOffset)));
        return;
    }

    loadPtr(Address(base, JSObject::butterflyOffset()), base);
    store64(value, Address(base, sizeof(JSValue) * offsetInButterfly(cachedOffset)));
}

// Compile a load from an object's property storage. May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        load64(Address(base, JSObject::offsetOfInlineStorage() + sizeof(JSValue) * offsetInInlineStorage(cachedOffset)), result);
        return;
    }

    loadPtr(Address(base, JSObject::butterflyOffset()), result);
    load64(Address(result, sizeof(JSValue) * offsetInButterfly(cachedOffset)), result);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID result, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        load64(base->locationForOffset(cachedOffset), result);
        return;
    }

    loadPtr(base->butterflyAddress(), result);
    load64(Address(result, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>)), result);
}

void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
{
    if (!needsVarInjectionChecks)
        return;
    addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
}

void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    emitGetVirtualRegister(scope, regT0);
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
    unsigned depth = currentInstruction[5].u.operand;

    switch (resolveType) {
    case GlobalProperty:
    case GlobalVar:
    case GlobalPropertyWithVarInjectionChecks:
    case GlobalVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
        emitPutVirtualRegister(dst);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    case LocalClosureVar:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);

    if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    int32_t scope = currentInstruction[2].u.operand;
    int32_t identifierIndex = currentInstruction[3].u.operand;
    callOperation(operationResolveScope, dst, scope, identifierIndex);
}

void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    emitGetVirtualRegister(scope, regT0);
    loadPtr(structureSlot, regT1);
    addSlowCase(branchTestPtr(Zero, regT1));
    load32(Address(regT1, Structure::structureIDOffset()), regT1);
    addSlowCase(branch32(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT1));
}

void JIT::emitGetGlobalProperty(uintptr_t* operandSlot)
{
    load32(operandSlot, regT1);
    compileGetDirectOffset(regT0, regT0, regT1, regT2, KnownNotFinal);
}

void JIT::emitGetGlobalVar(uintptr_t operand)
{
    loadPtr(reinterpret_cast<void*>(operand), regT0);
}

void JIT::emitGetClosureVar(int scope, uintptr_t operand)
{
    emitGetVirtualRegister(scope, regT0);
    loadPtr(Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register)), regT0);
}

void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitGetGlobalProperty(operandSlot);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetGlobalVar(*operandSlot);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetClosureVar(scope, *operandSlot);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    case LocalClosureVar:
        RELEASE_ASSERT_NOT_REACHED();
    }
    emitPutVirtualRegister(dst);
    emitValueProfilingSite();
}

void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();

    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
        linkSlowCase(iter);
    linkSlowCase(iter);
    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}

void JIT::emitPutGlobalProperty(uintptr_t* operandSlot, int value)
{
    emitGetVirtualRegister(value, regT2);

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    loadPtr(operandSlot, regT1);
    negPtr(regT1);
    storePtr(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)));
}

void JIT::emitPutGlobalVar(uintptr_t operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT0);
    emitNotifyWrite(set);
    storePtr(regT0, reinterpret_cast<void*>(operand));
}

void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
{
    emitGetVirtualRegister(value, regT1);
    emitGetVirtualRegister(scope, regT0);
    emitNotifyWrite(set);
    storePtr(regT1, Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register)));
}

void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitPutGlobalProperty(operandSlot, value);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutGlobalVar(*operandSlot, value, currentInstruction[5].u.watchpointSet);
        break;
    case LocalClosureVar:
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitWriteBarrier(scope, value, ShouldFilterValue);
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutClosureVar(scope, *operandSlot, value, currentInstruction[5].u.watchpointSet);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
}

void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    unsigned linkCount = 0;
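    // linkCount must match the number of addSlowCase calls made by emit_op_put_to_scope for
    // this resolveType: the var-injection / structure check, the notifyWrite watchpoint
    // check (absent when the set was already invalidated at compile time), and the extra
    // slow case added by emitLoadWithStructureCheck for the GlobalProperty cases.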
    if (resolveType != GlobalVar && resolveType != ClosureVar && resolveType != LocalClosureVar)
        linkCount++;
    if ((resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks || resolveType == LocalClosureVar)
        && currentInstruction[5].u.watchpointSet->state() != IsInvalidated)
        linkCount++;
    if (resolveType == GlobalProperty || resolveType == GlobalPropertyWithVarInjectionChecks)
        linkCount++;
    if (!linkCount)
        return;
    while (linkCount--)
        linkSlowCase(iter);
    callOperation(operationPutToScope, currentInstruction);
}

void JIT::emit_op_get_from_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitGetVirtualRegister(arguments, regT0);
    load64(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)), regT0);
    emitValueProfilingSite();
    emitPutVirtualRegister(dst);
}

void JIT::emit_op_put_to_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitWriteBarrier(arguments, value, ShouldFilterValue);

    emitGetVirtualRegister(arguments, regT0);
    emitGetVirtualRegister(value, regT1);
    store64(regT1, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>)));
}

void JIT::emit_op_init_global_const(Instruction* currentInstruction)
{
    JSGlobalObject* globalObject = m_codeBlock->globalObject();
    emitWriteBarrier(globalObject, currentInstruction[2].u.operand, ShouldFilterValue);
    emitGetVirtualRegister(currentInstruction[2].u.operand, regT0);
    store64(regT0, currentInstruction[1].u.variablePointer);
}

#endif // USE(JSVALUE64)

#if USE(JSVALUE64)
void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
#if ENABLE(GGC)
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitGetVirtualRegister(value, regT0);
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);
    }

    emitGetVirtualRegister(owner, regT0);
    Jump ownerNotCell;
    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(regT0);
    callOperation(operationUnconditionalWriteBarrier, regT0);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBaseAndValue || mode == ShouldFilterBase)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
#else
    UNUSED_PARAM(owner);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
#endif
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
#if ENABLE(GGC)
    emitGetVirtualRegister(value, regT0);
    Jump valueNotCell;
    if (mode == ShouldFilterValue)
        valueNotCell = branchTest64(NonZero, regT0, tagMaskRegister);

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
#else
    UNUSED_PARAM(owner);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
#endif
}

#else // USE(JSVALUE64)

void JIT::emitWriteBarrier(unsigned owner, unsigned value, WriteBarrierMode mode)
{
#if ENABLE(GGC)
    Jump valueNotCell;
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitLoad(owner, regT0, regT1);
    Jump ownerNotCell;
    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));

    Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(regT1);
    callOperation(operationUnconditionalWriteBarrier, regT1);
    ownerIsRememberedOrInEden.link(this);

    if (mode == ShouldFilterBase || mode == ShouldFilterBaseAndValue)
        ownerNotCell.link(this);
    if (mode == ShouldFilterValue || mode == ShouldFilterBaseAndValue)
        valueNotCell.link(this);
#else
    UNUSED_PARAM(owner);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
#endif
}

void JIT::emitWriteBarrier(JSCell* owner, unsigned value, WriteBarrierMode mode)
{
#if ENABLE(GGC)
    Jump valueNotCell;
    if (mode == ShouldFilterValue) {
        emitLoadTag(value, regT0);
        valueNotCell = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
    }

    emitWriteBarrier(owner);

    if (mode == ShouldFilterValue)
        valueNotCell.link(this);
#else
    UNUSED_PARAM(owner);
    UNUSED_PARAM(value);
    UNUSED_PARAM(mode);
#endif
}

#endif // USE(JSVALUE64)

void JIT::emitWriteBarrier(JSCell* owner)
{
#if ENABLE(GGC)
    if (!MarkedBlock::blockFor(owner)->isMarked(owner)) {
        Jump ownerIsRememberedOrInEden = jumpIfIsRememberedOrInEden(owner);
        callOperation(operationUnconditionalWriteBarrier, owner);
        ownerIsRememberedOrInEden.link(this);
    } else
        callOperation(operationUnconditionalWriteBarrier, owner);
#else
    UNUSED_PARAM(owner);
#endif // ENABLE(GGC)
}

void JIT::privateCompileGetByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    case JITDirectArguments:
        slowCases = emitDirectArgumentsGetByVal(currentInstruction, badType);
        break;
    case JITScopedArguments:
        slowCases = emitScopedArgumentsGetByVal(currentInstruction, badType);
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayGetByVal(currentInstruction, badType, type);
        else
            slowCases = emitFloatTypedArrayGetByVal(currentInstruction, badType, type);
        break;
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);

    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));

    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
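    // Wiring: the op's badType jump is repatched (below) to enter this stub; the stub's own
    // badType and slowCases exits branch back to the op's slow path call, and 'done' jumps
    // back into the main instruction stream at the op's done label.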

    byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        m_codeBlock, patchBuffer,
        ("Baseline get_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));

    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(operationGetByValGeneric));
}

void JIT::privateCompilePutByVal(ByValInfo* byValInfo, ReturnAddressPtr returnAddress, JITArrayMode arrayMode)
{
    Instruction* currentInstruction = m_codeBlock->instructions().begin() + byValInfo->bytecodeIndex;

    PatchableJump badType;
    JumpList slowCases;

#if ENABLE(GGC)
    bool needsLinkForWriteBarrier = false;
#endif

    switch (arrayMode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
#if ENABLE(GGC)
        needsLinkForWriteBarrier = true;
#endif
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
#if ENABLE(GGC)
        needsLinkForWriteBarrier = true;
#endif
        break;
    default:
        TypedArrayType type = typedArrayTypeForJITArrayMode(arrayMode);
        if (isInt(type))
            slowCases = emitIntTypedArrayPutByVal(currentInstruction, badType, type);
        else
            slowCases = emitFloatTypedArrayPutByVal(currentInstruction, badType, type);
        break;
    }

    Jump done = jump();

    LinkBuffer patchBuffer(*m_vm, *this, m_codeBlock);
    patchBuffer.link(badType, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(slowCases, CodeLocationLabel(MacroAssemblerCodePtr::createFromExecutableAddress(returnAddress.value())).labelAtOffset(byValInfo->returnAddressToSlowPath));
    patchBuffer.link(done, byValInfo->badTypeJump.labelAtOffset(byValInfo->badTypeJumpToDone));
#if ENABLE(GGC)
    if (needsLinkForWriteBarrier) {
        ASSERT(m_calls.last().to == operationUnconditionalWriteBarrier);
        patchBuffer.link(m_calls.last().from, operationUnconditionalWriteBarrier);
    }
#endif

    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;
    if (!isDirect) {
        byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
            m_codeBlock, patchBuffer,
            ("Baseline put_by_val stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    } else {
        byValInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
            m_codeBlock, patchBuffer,
            ("Baseline put_by_val_direct stub for %s, return point %p", toCString(*m_codeBlock).data(), returnAddress.value()));
    }
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(byValInfo->badTypeJump, CodeLocationLabel(byValInfo->stubRoutine->code().code()));
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(isDirect ? operationDirectPutByValGeneric : operationPutByValGeneric));
}

JIT::JumpList JIT::emitDirectArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(DirectArgumentsType));

    slowCases.append(branch32(AboveOrEqual, property, Address(base, DirectArguments::offsetOfLength())));
    slowCases.append(branchTestPtr(NonZero, Address(base, DirectArguments::offsetOfOverrides())));

    zeroExtend32ToPtr(property, scratch);
    loadValue(BaseIndex(base, scratch, TimesEight, DirectArguments::storageOffset()), result);

    return slowCases;
}

JIT::JumpList JIT::emitScopedArgumentsGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    JSValueRegs result = JSValueRegs(regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    JSValueRegs result = JSValueRegs(regT1, regT0);
    RegisterID scratch = regT3;
    RegisterID scratch2 = regT4;
#endif

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(ScopedArgumentsType));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, ScopedArguments::offsetOfTotalLength())));

    loadPtr(Address(base, ScopedArguments::offsetOfTable()), scratch);
    load32(Address(scratch, ScopedArgumentsTable::offsetOfLength()), scratch2);
    Jump overflowCase = branch32(AboveOrEqual, property, scratch2);
    loadPtr(Address(base, ScopedArguments::offsetOfScope()), scratch2);
    loadPtr(Address(scratch, ScopedArgumentsTable::offsetOfArguments()), scratch);
    load32(BaseIndex(scratch, property, TimesFour), scratch);
    slowCases.append(branch32(Equal, scratch, TrustedImm32(ScopeOffset::invalidOffset)));
    loadValue(BaseIndex(scratch2, scratch, TimesEight, JSEnvironmentRecord::offsetOfVariables()), result);
    Jump done = jump();
    overflowCase.link(this);
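    // scratch2 still holds the table length here, so property - length indexes the overflow
    // storage that hangs off the ScopedArguments object itself.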
    sub32(property, scratch2);
    neg32(scratch2);
    loadValue(BaseIndex(base, scratch2, TimesEight, ScopedArguments::overflowStorageOffset()), result);
    slowCases.append(branchIfEmpty(result));
    done.link(this);

    return slowCases;
}

JIT::JumpList JIT::emitIntTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isInt(type));

    // The best way to test the array type is to check the typed-array type byte in the
    // cell's type info. We need to do so without clobbering the register that holds the
    // indexing type, base, and property.

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), base);

    switch (elementSize(type)) {
    case 1:
        if (isSigned(type))
            load8SignedExtendTo32(BaseIndex(base, property, TimesOne), resultPayload);
        else
            load8(BaseIndex(base, property, TimesOne), resultPayload);
        break;
    case 2:
        if (isSigned(type))
            load16SignedExtendTo32(BaseIndex(base, property, TimesTwo), resultPayload);
        else
            load16(BaseIndex(base, property, TimesTwo), resultPayload);
        break;
    case 4:
        load32(BaseIndex(base, property, TimesFour), resultPayload);
        break;
    default:
        CRASH();
    }

    Jump done;
    if (type == TypeUint32) {
        Jump canBeInt = branch32(GreaterThanOrEqual, resultPayload, TrustedImm32(0));

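        // A uint32 above INT32_MAX reads back as a negative int32. Converting to double and
        // adding 2^32 recovers the unsigned value, e.g. 0xFFFFFFFF -> -1.0 + 4294967296.0 = 4294967295.0.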
        convertInt32ToDouble(resultPayload, fpRegT0);
        addDouble(AbsoluteAddress(&twoToThe32), fpRegT0);
#if USE(JSVALUE64)
        moveDoubleTo64(fpRegT0, resultPayload);
        sub64(tagTypeNumberRegister, resultPayload);
#else
        moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif

        done = jump();
        canBeInt.link(this);
    }

#if USE(JSVALUE64)
    or64(tagTypeNumberRegister, resultPayload);
#else
    move(TrustedImm32(JSValue::Int32Tag), resultTag);
#endif
    if (done.isSet())
        done.link(this);
    return slowCases;
}

JIT::JumpList JIT::emitFloatTypedArrayGetByVal(Instruction*, PatchableJump& badType, TypedArrayType type)
{
    ASSERT(isFloat(type));

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID resultPayload = regT0;
    RegisterID scratch = regT3;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID resultPayload = regT0;
    RegisterID resultTag = regT1;
    RegisterID scratch = regT3;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), scratch);
    badType = patchableBranch32(NotEqual, scratch, TrustedImm32(typeForTypedArrayType(type)));
    slowCases.append(branch32(AboveOrEqual, property, Address(base, JSArrayBufferView::offsetOfLength())));
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), base);

    switch (elementSize(type)) {
    case 4:
        loadFloat(BaseIndex(base, property, TimesFour), fpRegT0);
        convertFloatToDouble(fpRegT0, fpRegT0);
        break;
    case 8: {
        loadDouble(BaseIndex(base, property, TimesEight), fpRegT0);
        break;
    }
    default:
        CRASH();
    }

    Jump notNaN = branchDouble(DoubleEqual, fpRegT0, fpRegT0);
    static const double NaN = PNaN;
    loadDouble(TrustedImmPtr(&NaN), fpRegT0);
    notNaN.link(this);

#if USE(JSVALUE64)
    moveDoubleTo64(fpRegT0, resultPayload);
    sub64(tagTypeNumberRegister, resultPayload);
#else
    moveDoubleToInts(fpRegT0, resultPayload, resultTag);
#endif
    return slowCases;
}

JIT::JumpList JIT::emitIntTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
{
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ASSERT(isInt(type));

    int value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
    badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
    Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    Jump done = jump();
    inBounds.link(this);

#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    slowCases.append(emitJumpIfNotImmediateInteger(earlyScratch));
#else
    emitLoad(value, lateScratch, earlyScratch);
    slowCases.append(branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag)));
#endif

    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);

    if (isClamped(type)) {
        ASSERT(elementSize(type) == 1);
        ASSERT(!isSigned(type));
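        // Clamp to [0, 255] using two branches: the unsigned BelowOrEqual catches 0..0xff,
        // the signed GreaterThan catches values above 0xff, and the fall-through (negative
        // values, which fail both comparisons) gets zeroed.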
        Jump inBounds = branch32(BelowOrEqual, earlyScratch, TrustedImm32(0xff));
        Jump tooBig = branch32(GreaterThan, earlyScratch, TrustedImm32(0xff));
        xor32(earlyScratch, earlyScratch);
        Jump clamped = jump();
        tooBig.link(this);
        move(TrustedImm32(0xff), earlyScratch);
        clamped.link(this);
        inBounds.link(this);
    }

    switch (elementSize(type)) {
    case 1:
        store8(earlyScratch, BaseIndex(lateScratch, property, TimesOne));
        break;
    case 2:
        store16(earlyScratch, BaseIndex(lateScratch, property, TimesTwo));
        break;
    case 4:
        store32(earlyScratch, BaseIndex(lateScratch, property, TimesFour));
        break;
    default:
        CRASH();
    }

    done.link(this);

    return slowCases;
}

JIT::JumpList JIT::emitFloatTypedArrayPutByVal(Instruction* currentInstruction, PatchableJump& badType, TypedArrayType type)
{
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;
    ASSERT(isFloat(type));

    int value = currentInstruction[3].u.operand;

#if USE(JSVALUE64)
    RegisterID base = regT0;
    RegisterID property = regT1;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT2;
#else
    RegisterID base = regT0;
    RegisterID property = regT2;
    RegisterID earlyScratch = regT3;
    RegisterID lateScratch = regT1;
#endif

    JumpList slowCases;

    load8(Address(base, JSCell::typeInfoTypeOffset()), earlyScratch);
    badType = patchableBranch32(NotEqual, earlyScratch, TrustedImm32(typeForTypedArrayType(type)));
    Jump inBounds = branch32(Below, property, Address(base, JSArrayBufferView::offsetOfLength()));
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    Jump done = jump();
    inBounds.link(this);

#if USE(JSVALUE64)
    emitGetVirtualRegister(value, earlyScratch);
    Jump doubleCase = emitJumpIfNotImmediateInteger(earlyScratch);
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(emitJumpIfNotImmediateNumber(earlyScratch));
    add64(tagTypeNumberRegister, earlyScratch);
    move64ToDouble(earlyScratch, fpRegT0);
    ready.link(this);
#else
    emitLoad(value, lateScratch, earlyScratch);
    Jump doubleCase = branch32(NotEqual, lateScratch, TrustedImm32(JSValue::Int32Tag));
    convertInt32ToDouble(earlyScratch, fpRegT0);
    Jump ready = jump();
    doubleCase.link(this);
    slowCases.append(branch32(Above, lateScratch, TrustedImm32(JSValue::LowestTag)));
    moveIntsToDouble(earlyScratch, lateScratch, fpRegT0, fpRegT1);
    ready.link(this);
#endif

    // We would be loading this into base as in get_by_val, except that the slow
    // path expects the base to be unclobbered.
    loadPtr(Address(base, JSArrayBufferView::offsetOfVector()), lateScratch);

    switch (elementSize(type)) {
    case 4:
        convertDoubleToFloat(fpRegT0, fpRegT0);
        storeFloat(fpRegT0, BaseIndex(lateScratch, property, TimesFour));
        break;
    case 8:
        storeDouble(fpRegT0, BaseIndex(lateScratch, property, TimesEight));
        break;
    default:
        CRASH();
    }

    done.link(this);

    return slowCases;
}

} // namespace JSC

#endif // ENABLE(JIT)