// jit/JITPropertyAccess32_64.cpp, from apple/javascriptcore.git (JavaScriptCore-7601.1.46.3)
/*
 * Copyright (C) 2008, 2009, 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "DirectArguments.h"
#include "GCAwareJITStubRoutine.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JSArray.h"
#include "JSEnvironmentRecord.h"
#include "JSFunction.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include <wtf/StringPrintStream.h>


namespace JSC {

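// The ops below have no inline fast path: they simply load their operands into registers and call
// straight out to the corresponding C++ operations.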
void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(base, regT1, regT0);
    emitLoad(value, regT3, regT2);
    callOperation(operationPutByIndex, regT1, regT0, property, regT3, regT2);
}

void JIT::emit_op_put_getter_by_id(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int getter = currentInstruction[3].u.operand;

    emitLoadPayload(base, regT1);
    emitLoadPayload(getter, regT3);
    callOperation(operationPutGetterById, regT1, &m_codeBlock->identifier(property), regT3);
}

void JIT::emit_op_put_setter_by_id(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int setter = currentInstruction[3].u.operand;

    emitLoadPayload(base, regT1);
    emitLoadPayload(setter, regT3);
    callOperation(operationPutSetterById, regT1, &m_codeBlock->identifier(property), regT3);
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int getter = currentInstruction[3].u.operand;
    int setter = currentInstruction[4].u.operand;

    emitLoadPayload(base, regT1);
    emitLoadPayload(getter, regT3);
    emitLoadPayload(setter, regT4);
    callOperation(operationPutGetterSetter, regT1, &m_codeBlock->identifier(property), regT3, regT4);
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    emitLoad(base, regT1, regT0);
    callOperation(operationDeleteById, dst, regT1, regT0, &m_codeBlock->identifier(property));
}

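// Shared thunk for get_by_val on a JSString base: it bounds-checks the index against the string
// length, loads the 8-bit or 16-bit character, and returns the matching entry from the VM's
// single-character string cache. On any failure it returns a null payload so the caller can fall
// through to the generic slow path.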
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit(vm);
    JumpList failures;
    failures.append(JSC::branchStructure(jit, NotEqual, Address(regT0, JSCell::structureIDOffset()), vm->stringStructure.get()));

    // Load string length to regT1, and start the process of loading the data pointer into regT0
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT1);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);

    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*vm, jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

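// Fast path for get_by_val: check that the property is an int32 and the base is a cell, profile
// the base's indexing type, then emit a load specialized for the array shape the profile predicts.
// Everything else (including string subscripting) is handled in emitSlow_op_get_by_val.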
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    emitArrayProfilingSiteWithCell(regT0, regT1, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    if (!ASSERT_DISABLED) {
        Jump resultOK = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
        abortWithReason(JITGetByValResultIsNotEmpty);
        resultOK.link(this);
    }

    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

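// The load helpers below share a register contract: on entry regT0 holds the base cell's payload,
// regT1 its indexing shape, and regT2 the int32 index; on exit the loaded value is in regT1 (tag)
// and regT0 (payload), or fpRegT0 for the double case. badType is the jump taken when the indexing
// shape does not match, and the returned JumpList collects the remaining slow cases
// (out-of-bounds indices and holes).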
JIT::JumpList JIT::emitContiguousLoad(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(expectedShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    return slowCases;
}

JIT::JumpList JIT::emitDoubleLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(DoubleShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));
    loadDouble(BaseIndex(regT3, regT2, TimesEight), fpRegT0);
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));

    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageLoad(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    add32(TrustedImm32(-ArrayStorageShape), regT1, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));
    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    return slowCases;
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchStructure(NotEqual, Address(regT0, JSCell::structureIDOffset()), m_vm->stringStructure.get());
    emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    Label slowPath = label();

    emitLoad(base, regT1, regT0);
    emitLoad(property, regT3, regT2);
    Call call = callOperation(operationGetByValDefault, dst, regT1, regT0, regT3, regT2, profile);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    emitArrayProfilingSiteWithCell(regT0, regT1, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

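// Generic store for Int32, Double, and Contiguous shapes. In-bounds stores write the value in the
// representation the shape requires (with a type check for Int32 and a NaN check for Double).
// Stores past the public length but still inside the vector record a store-to-hole in the array
// profile and bump the public length; stores past the vector length go to the slow case.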
JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitLoad(value, regT1, regT0);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        break;
    case ContiguousShape:
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        emitLoad(base, regT2, regT3);
        emitWriteBarrier(base, value, ShouldFilterValue);
        break;
    case DoubleShape: {
        Jump notInt = branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag));
        convertInt32ToDouble(regT0, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
        break;
    }
    default:
        CRASH();
        break;
    }

    Jump done = jump();

    outOfBounds.link(this);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT2, regT1);
    store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    return slowCases;
}

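// ArrayStorage store: in-bounds stores into a non-hole slot simply overwrite it. Storing into a
// hole records a store-to-hole in the array profile, bumps m_numValuesInVector, and updates the
// array length if the index is at or past it; indices beyond the vector length go to the slow case.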
JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();

    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
    jump().linkTo(storeResult, this);

    end.link(this);

    emitWriteBarrier(base, value, ShouldFilterValue);

    return slowCases;
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int property = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();

    bool isDirect = m_interpreter->getOpcodeID(currentInstruction->u.opcode) == op_put_by_val_direct;

#if CPU(X86)
    // FIXME: We only have 5 temp registers, but need 6 to make this call, therefore we materialize
    // our own call. When we finish moving JSC to the C call stack, we'll get another register so
    // we can use the normal case.
    resetCallArguments();
    addCallArgument(GPRInfo::callFrameRegister);
    emitLoad(base, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    emitLoad(property, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    emitLoad(value, regT0, regT1);
    addCallArgument(regT1);
    addCallArgument(regT0);
    addCallArgument(TrustedImmPtr(profile));
    Call call = appendCallWithExceptionCheck(isDirect ? operationDirectPutByVal : operationPutByVal);
#else
    // The register selection below is chosen to reduce register swapping on ARM.
    // Swapping shouldn't happen on other platforms.
    emitLoad(base, regT2, regT1);
    emitLoad(property, regT3, regT0);
    emitLoad(value, regT5, regT4);
    Call call = callOperation(isDirect ? operationDirectPutByVal : operationPutByVal, regT2, regT1, regT3, regT0, regT5, regT4, profile);
#endif

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling())
        emitArrayProfilingSiteForBytecodeIndexWithCell(regT0, regT2, m_bytecodeOffset);

    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT1, regT0), DontSpill);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int resultVReg = currentInstruction[1].u.operand;
    int baseVReg = currentInstruction[2].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    linkSlowCaseIfNotJSCell(iter, baseVReg);
    linkSlowCase(iter);

    JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];

    Label coldPathBegin = label();

    Call call = callOperation(WithProfile, operationGetByIdOptimize, resultVReg, gen.stubInfo(), regT1, regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store one pointer
    // to just after the arguments have been loaded into registers ('hotPathBegin'), and we generate
    // code such that the Structure and offset are always at the same distance from it.

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    int direct = currentInstruction[8].u.operand;

    emitWriteBarrier(base, value, ShouldFilterBase);

    emitLoad2(base, regT1, regT0, value, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);

    JITPutByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeOffset), RegisterSet::specialRegisters(),
        JSValueRegs::payloadOnly(regT0), JSValueRegs(regT3, regT2),
        regT1, DontSpill, m_codeBlock->ecmaMode(), direct ? Direct : NotDirect);

    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());

    m_putByIds.append(gen);
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    const Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    Label coldPathBegin(this);

    // JITPutByIdGenerator only preserves the value and the base's payload; we have to reload the tag.
    emitLoadTag(base, regT1);

    JITPutByIdGenerator& gen = m_putByIds[m_putByIdIndex++];

    Call call = callOperation(
        gen.slowPathFunction(), gen.stubInfo(), regT3, regT2, regT1, regT0, ident->impl());

    gen.reportSlowPathCall(coldPathBegin, call);
}

// Compile a store into an object's property storage. May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}

// Compile a load from an object's property storage. May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }

    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::butterflyOffset()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }

    loadPtr(base->butterflyAddress(), resultTag);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

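// Variant used when the property offset is only known at runtime (in 'offset'). Inline offsets are
// rebased onto the object's inline storage; out-of-line offsets are negated and indexed off the
// butterfly. Either way, the same BaseIndex loads below read the payload and tag.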
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);

    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base);
        done.link(this);
    } else {
        if (!ASSERT_DISABLED) {
            Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
            abortWithReason(JITOffsetIsNotOutOfLine);
            isOutOfLine.link(this);
        }
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
    }
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
}

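// The scope-access fast paths below bail out to the slow case if the global object's var injection
// watchpoint has been invalidated (for example, by eval introducing new variables).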
void JIT::emitVarInjectionCheck(bool needsVarInjectionChecks)
{
    if (!needsVarInjectionChecks)
        return;
    addSlowCase(branch8(Equal, AbsoluteAddress(m_codeBlock->globalObject()->varInjectionWatchpoint()->addressOfState()), TrustedImm32(IsInvalidated)));
}

void JIT::emitResolveClosure(int dst, int scope, bool needsVarInjectionChecks, unsigned depth)
{
    emitVarInjectionCheck(needsVarInjectionChecks);
    move(TrustedImm32(JSValue::CellTag), regT1);
    emitLoadPayload(scope, regT0);
    for (unsigned i = 0; i < depth; ++i)
        loadPtr(Address(regT0, JSScope::offsetOfNext()), regT0);
    emitStore(dst, regT1, regT0);
}

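// op_resolve_scope: for global resolves the result is simply the global object; for closure
// variables we walk 'depth' links up the scope chain; anything dynamic defers entirely to the
// slow path.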
void JIT::emit_op_resolve_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);
    unsigned depth = currentInstruction[5].u.operand;

    switch (resolveType) {
    case GlobalProperty:
    case GlobalVar:
    case GlobalPropertyWithVarInjectionChecks:
    case GlobalVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        move(TrustedImm32(JSValue::CellTag), regT1);
        move(TrustedImmPtr(m_codeBlock->globalObject()), regT0);
        emitStore(dst, regT1, regT0);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitResolveClosure(dst, scope, needsVarInjectionChecks(resolveType), depth);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    case LocalClosureVar:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

void JIT::emitSlow_op_resolve_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = static_cast<ResolveType>(currentInstruction[4].u.operand);

    if (resolveType == GlobalProperty || resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    int32_t scope = currentInstruction[2].u.operand;
    int32_t identifierIndex = currentInstruction[3].u.operand;
    callOperation(operationResolveScope, dst, scope, identifierIndex);
}

void JIT::emitLoadWithStructureCheck(int scope, Structure** structureSlot)
{
    emitLoad(scope, regT1, regT0);
    loadPtr(structureSlot, regT2);
    addSlowCase(branchPtr(NotEqual, Address(regT0, JSCell::structureIDOffset()), regT2));
}

void JIT::emitGetGlobalProperty(uintptr_t* operandSlot)
{
    move(regT0, regT2);
    load32(operandSlot, regT3);
    compileGetDirectOffset(regT2, regT1, regT0, regT3, KnownNotFinal);
}

void JIT::emitGetGlobalVar(uintptr_t operand)
{
    load32(reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag), regT1);
    load32(reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload), regT0);
}

void JIT::emitGetClosureVar(int scope, uintptr_t operand)
{
    emitLoad(scope, regT1, regT0);
    load32(Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register) + TagOffset), regT1);
    load32(Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register) + PayloadOffset), regT0);
}

void JIT::emit_op_get_from_scope(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int scope = currentInstruction[2].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitGetGlobalProperty(operandSlot);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetGlobalVar(*operandSlot);
        break;
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitGetClosureVar(scope, *operandSlot);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    case LocalClosureVar:
        RELEASE_ASSERT_NOT_REACHED();
    }
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emitSlow_op_get_from_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();

    if (resolveType == GlobalVar || resolveType == ClosureVar)
        return;

    linkSlowCase(iter);
    callOperation(WithProfile, operationGetFromScope, dst, currentInstruction);
}

void JIT::emitPutGlobalProperty(uintptr_t* operandSlot, int value)
{
    emitLoad(value, regT3, regT2);

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT0);
    loadPtr(operandSlot, regT1);
    negPtr(regT1);
    store32(regT3, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    store32(regT2, BaseIndex(regT0, regT1, TimesEight, (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
}

void JIT::emitPutGlobalVar(uintptr_t operand, int value, WatchpointSet* set)
{
    emitLoad(value, regT1, regT0);
    emitNotifyWrite(set);
    store32(regT1, reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
    store32(regT0, reinterpret_cast<char*>(operand) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
}

void JIT::emitPutClosureVar(int scope, uintptr_t operand, int value, WatchpointSet* set)
{
    emitLoad(value, regT3, regT2);
    emitLoad(scope, regT1, regT0);
    emitNotifyWrite(set);
    store32(regT3, Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register) + TagOffset));
    store32(regT2, Address(regT0, JSEnvironmentRecord::offsetOfVariables() + operand * sizeof(Register) + PayloadOffset));
}

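// op_put_to_scope: the fast paths mirror op_get_from_scope. Global property stores are guarded by
// a Structure check; var and closure-var stores notify the variable's watchpoint set via
// emitNotifyWrite. Each store is preceded by a write barrier on the object being written into.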
void JIT::emit_op_put_to_scope(Instruction* currentInstruction)
{
    int scope = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    Structure** structureSlot = currentInstruction[5].u.structure.slot();
    uintptr_t* operandSlot = reinterpret_cast<uintptr_t*>(&currentInstruction[6].u.pointer);

    switch (resolveType) {
    case GlobalProperty:
    case GlobalPropertyWithVarInjectionChecks:
        emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
        emitLoadWithStructureCheck(scope, structureSlot); // Structure check covers var injection.
        emitPutGlobalProperty(operandSlot, value);
        break;
    case GlobalVar:
    case GlobalVarWithVarInjectionChecks:
        emitWriteBarrier(m_codeBlock->globalObject(), value, ShouldFilterValue);
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutGlobalVar(*operandSlot, value, currentInstruction[5].u.watchpointSet);
        break;
    case LocalClosureVar:
    case ClosureVar:
    case ClosureVarWithVarInjectionChecks:
        emitWriteBarrier(scope, value, ShouldFilterValue);
        emitVarInjectionCheck(needsVarInjectionChecks(resolveType));
        emitPutClosureVar(scope, *operandSlot, value, currentInstruction[5].u.watchpointSet);
        break;
    case Dynamic:
        addSlowCase(jump());
        break;
    }
}

void JIT::emitSlow_op_put_to_scope(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    ResolveType resolveType = ResolveModeAndType(currentInstruction[4].u.operand).type();
    unsigned linkCount = 0;
    if (resolveType != GlobalVar && resolveType != ClosureVar && resolveType != LocalClosureVar)
        linkCount++;
    if ((resolveType == GlobalVar || resolveType == GlobalVarWithVarInjectionChecks || resolveType == LocalClosureVar)
        && currentInstruction[5].u.watchpointSet->state() != IsInvalidated)
        linkCount++;
    if (!linkCount)
        return;
    while (linkCount--)
        linkSlowCase(iter);
    callOperation(operationPutToScope, currentInstruction);
}

void JIT::emit_op_get_from_arguments(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int arguments = currentInstruction[2].u.operand;
    int index = currentInstruction[3].u.operand;

    emitLoadPayload(arguments, regT0);
    load32(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + TagOffset), regT1);
    load32(Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + PayloadOffset), regT0);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emit_op_put_to_arguments(Instruction* currentInstruction)
{
    int arguments = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitWriteBarrier(arguments, value, ShouldFilterValue);

    emitLoadPayload(arguments, regT0);
    emitLoad(value, regT1, regT2);
    store32(regT1, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + TagOffset));
    store32(regT2, Address(regT0, DirectArguments::storageOffset() + index * sizeof(WriteBarrier<Unknown>) + PayloadOffset));
}

void JIT::emit_op_init_global_const(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* variablePointer = currentInstruction[1].u.variablePointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitWriteBarrier(globalObject, value, ShouldFilterValue);

    emitLoad(value, regT1, regT0);

    store32(regT1, variablePointer->tagPointer());
    store32(regT0, variablePointer->payloadPointer());
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)