/*
 * Copyright (C) 2008, 2012, 2013, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlines_h
#define JITInlines_h

#if ENABLE(JIT)

#include "JSCInlines.h"

namespace JSC {

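// emitDoubleGetByVal: loads a double array element and boxes it into the JSValue
// representation used by the current value format (a single 64-bit encoding on
// JSVALUE64, a tag/payload register pair on JSVALUE32_64).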
#if USE(JSVALUE64)
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleTo64(fpRegT0, regT0);
    sub64(tagTypeNumberRegister, regT0);
    return slowCases;
}
#else
inline MacroAssembler::JumpList JIT::emitDoubleGetByVal(Instruction* instruction, PatchableJump& badType)
{
    JumpList slowCases = emitDoubleLoad(instruction, badType);
    moveDoubleToInts(fpRegT0, regT0, regT1);
    return slowCases;
}
#endif // USE(JSVALUE64)

ALWAYS_INLINE MacroAssembler::JumpList JIT::emitLoadForArrayMode(Instruction* currentInstruction, JITArrayMode arrayMode, PatchableJump& badType)
{
    switch (arrayMode) {
    case JITInt32:
        return emitInt32Load(currentInstruction, badType);
    case JITDouble:
        return emitDoubleLoad(currentInstruction, badType);
    case JITContiguous:
        return emitContiguousLoad(currentInstruction, badType);
    case JITArrayStorage:
        return emitArrayStorageLoad(currentInstruction, badType);
    default:
        break;
    }
    RELEASE_ASSERT_NOT_REACHED();
    return MacroAssembler::JumpList();
}

inline MacroAssembler::JumpList JIT::emitContiguousGetByVal(Instruction* instruction, PatchableJump& badType, IndexingType expectedShape)
{
    return emitContiguousLoad(instruction, badType, expectedShape);
}

inline MacroAssembler::JumpList JIT::emitArrayStorageGetByVal(Instruction* instruction, PatchableJump& badType)
{
    return emitArrayStorageLoad(instruction, badType);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(int src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}

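// Loads the sole character of a single-character JSString into dst. Appends to
// 'failures' if src is not a string, if its length is not 1, or if its StringImpl
// has not been resolved yet.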
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchStructure(NotEqual, Address(src, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}

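// Emits a near call and records it in m_calls so that it can be bound to
// 'function' when the code is linked.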
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

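// Stores the current bytecode location into the tag half of the ArgumentCount
// header slot and publishes the call frame in vm.topCallFrame, so slow paths
// and exception handling can identify where execution is.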
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
#if USE(JSVALUE32_64)
    Instruction* instruction = m_codeBlock->instructions().begin() + m_bytecodeOffset + 1;
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeInstruction(instruction);
#else
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(m_bytecodeOffset + 1);
#endif
    store32(TrustedImm32(locationBits), intTagFor(JSStack::ArgumentCount));
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCall(function);
    exceptionCheck();
    return call;
}

#if OS(WINDOWS) && CPU(X86_64)
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
    exceptionCheck();
    return call;
}
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
{
    updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
    MacroAssembler::Call call = appendCall(function);
    exceptionCheckWithCallFrameRollback();
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
    emitValueProfilingSite();
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}

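// callOperation() overloads: each one marshals its arguments (prepending the
// ExecState), emits the call, and checks for a pending exception. The variants
// taking a 'dst' also store the returned JSValue into that virtual register,
// and the WithProfileTag variants additionally record a value profile.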
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJsc operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EJscZ operation, GPRReg arg1, int32_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EL operation, TrustedImmPtr arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EO operation, GPRReg arg)
{
    setupArgumentsWithExecState(arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_ESt operation, Structure* structure)
{
    setupArgumentsWithExecState(TrustedImmPtr(structure));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EZ operation, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_E operation, int dst)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2, TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, const JSValue* arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), TrustedImmPtr(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EC operation, int dst, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJscC operation, int dst, GPRReg arg1, JSCell* cell)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EP operation, int dst, void* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(WithProfileTag, J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZ operation, int dst, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZZ operation, int dst, int32_t arg1, int32_t arg2)
{
    setupArgumentsWithExecState(TrustedImm32(arg1), TrustedImm32(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EOJss operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Sprt_JITOperation_EZ operation, int32_t op)
{
#if OS(WINDOWS) && CPU(X86_64)
    setupArgumentsWithExecStateForCallWithSlowPathReturnType(TrustedImm32(op));
    return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
#else
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
#endif
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EE operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    updateTopCallFrame();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
{
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(J_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, CodeBlock* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

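// The overloads below differ by value format: with JSVALUE64 each JSValue
// argument travels in a single GPR, while with JSVALUE32_64 it is passed as a
// separate tag/payload register pair.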
#if USE(JSVALUE64)
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1, RegisterID regOp2, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1, regOp2, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJAp operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, GPRReg arg)
{
    setupArgumentsWithExecState(TrustedImm32(dst), arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1, const Identifier* arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJAp operation, int dst, GPRReg arg1, GPRReg arg2, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(arg1, arg2, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int op1, SymbolTable* symbolTable, RegisterID regOp3)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

#else // USE(JSVALUE32_64)

// An EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When compiling for the ARM EABI, it must be aligned to an even-numbered register (r0, r2) or to [sp].
// To keep the generated calls from passing it in the wrong registers, occupy r1 or r3 with a dummy argument when necessary.
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

// In JSVALUE32_64 an EncodedJSValue is a 64-bit integer, which on the SH4 architecture cannot be split half in an argument register and half on the stack.
// To avoid this, occupy the 4th argument register (r7) with a dummy argument when necessary. This must only be done when there
// is no other 32-bit value argument behind this 64-bit JSValue.
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Z_JITOperation_EJZZ operation, GPRReg arg1Tag, GPRReg arg1Payload, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJZZ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, int32_t arg3, GPRReg arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, TrustedImm32(arg3), arg4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, const Identifier* arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJAp operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1Tag, GPRReg arg1Payload, size_t arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID argTag, RegisterID argPayload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG argPayload, argTag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID arg1Tag, RegisterID arg1Payload, RegisterID arg2Tag, RegisterID arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECIC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECICC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZSymtabJ operation, int32_t op1, SymbolTable* symbolTable, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(TrustedImm32(op1), TrustedImmPtr(symbolTable), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, UniquedStringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1Payload, regOp1Tag, regOp2Payload, regOp2Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJAp operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload, ArrayProfile* arrayProfile)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag, TrustedImmPtr(arrayProfile));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZJ operation, int dst, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(TrustedImm32(dst), regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

#endif // USE(JSVALUE32_64)

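// Jumps if the cell in 'reg' does not have the given Structure.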
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchStructure(NotEqual, Address(reg, JSCell::structureIDOffset()), structure);
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

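// The addSlowCase/addJump/emitJumpSlowToHot helpers record jumps keyed by the
// current bytecode offset so that slow-path entries and branch targets can be
// linked once the whole code block has been generated.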
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellObject(RegisterID cellReg)
{
    return branch8(AboveOrEqual, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellNotObject(RegisterID cellReg)
{
    return branch8(Below, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif

#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

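// Bump-allocates a JSObject from the given MarkedAllocator's free list (taking
// the slow case when the list is empty), clears its butterfly pointer, and
// installs the structure and type info.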
template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));

    // initialize the object's structure
    emitStoreStructureWithTypeInfo(structure, result, scratch);
}

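// Value-profiling helpers: record the value currently held in regT0 (plus its
// tag in regT1 on JSVALUE32_64) into the profile bucket for the given bytecode.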
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif

    // We're in a simple configuration: only one bucket, so we can just do a direct
    // store.
#if USE(JSVALUE64)
    store64(value, valueProfile->m_buckets);
#else
    EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
    store32(value, &descriptor->asBits.payload);
    store32(valueTag, &descriptor->asBits.tag);
#endif
}

inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}

inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}

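// Records the cell's StructureID in the ArrayProfile (when profiling is enabled)
// and leaves the cell's indexing type in 'indexingType' for the caller to inspect.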
inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile* arrayProfile)
{
    if (shouldEmitProfiling()) {
        load32(MacroAssembler::Address(cell, JSCell::structureIDOffset()), indexingType);
        store32(indexingType, arrayProfile->addressOfLastSeenStructureID());
    }

    load8(Address(cell, JSCell::indexingTypeOffset()), indexingType);
}

inline void JIT::emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex)
{
    emitArrayProfilingSiteWithCell(cell, indexingType, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
}

inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
}

inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
}

static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
    return arrayModesInclude(arrayModes, capability);
}

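// Picks the JITArrayMode to specialize array accesses for, based on the shapes
// this profile has observed so far (checked in the order Double, Int32,
// ArrayStorage, falling back to Contiguous).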
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    profile->computeUpdatedPrediction(locker, m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes(locker);
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
}

#if USE(JSVALUE32_64)

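// In JSVALUE32_64 a JSValue lives in two 32-bit words, a tag and a payload, so
// the helpers below load and store the two halves separately.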
inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        return;
    }

    load32(tagFor(index), tag);
}

inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        return;
    }

    load32(payloadFor(index), payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    emitStore(dst, jsUndefined());
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

// emitGetVirtualRegister loads a virtual register from the stack-frame register array into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != std::numeric_limits<unsigned>::max()); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegister(VirtualRegister src, RegisterID dst)
{
    emitGetVirtualRegister(src.offset(), dst);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    emitGetVirtualRegister(src1, dst1);
    emitGetVirtualRegister(src2, dst2);
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2)
{
    emitGetVirtualRegisters(src1.offset(), dst1, src2.offset(), dst2);
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(int src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(VirtualRegister dst, RegisterID from)
{
    emitPutVirtualRegister(dst.offset(), from);
}

ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}

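// In the JSVALUE64 encoding, immediate int32s have all TagTypeNumber bits set,
// so an unsigned comparison against tagTypeNumberRegister distinguishes them
// from doubles and cells.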
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}

ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

template <typename T>
JIT::Jump JIT::branchStructure(RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return branch32(condition, leftHandSide, TrustedImm32(structure->id()));
#else
    return branchPtr(condition, leftHandSide, TrustedImmPtr(structure));
#endif
}

template <typename T>
MacroAssembler::Jump branchStructure(MacroAssembler& jit, MacroAssembler::RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return jit.branch32(condition, leftHandSide, MacroAssembler::TrustedImm32(structure->id()));
#else
    return jit.branchPtr(condition, leftHandSide, MacroAssembler::TrustedImmPtr(structure));
#endif
}

} // namespace JSC

#endif // ENABLE(JIT)

#endif // JITInlines_h