]>
Commit | Line | Data |
---|---|---|
1 | /* | |
2 | * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved. | |
3 | * | |
4 | * Redistribution and use in source and binary forms, with or without | |
5 | * modification, are permitted provided that the following conditions | |
6 | * are met: | |
7 | * 1. Redistributions of source code must retain the above copyright | |
8 | * notice, this list of conditions and the following disclaimer. | |
9 | * 2. Redistributions in binary form must reproduce the above copyright | |
10 | * notice, this list of conditions and the following disclaimer in the | |
11 | * documentation and/or other materials provided with the distribution. | |
12 | * | |
13 | * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY | |
14 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |
15 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
16 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR | |
17 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
18 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
19 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
20 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY | |
21 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
22 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
23 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
24 | */ | |
25 | ||
26 | #ifndef JITInlines_h | |
27 | #define JITInlines_h | |
28 | ||
29 | #if ENABLE(JIT) | |
30 | ||
31 | #include "JSCInlines.h" | |
32 | ||
33 | namespace JSC { | |
34 | ||
// Returns true if virtual register 'src' refers to a constant whose
// JSValue is a double (i.e. a non-int32 numeric immediate).
ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}
39 | ||
// Fetches the constant JSValue stored for virtual register 'src'.
// Caller must have verified that 'src' is a constant register index.
ALWAYS_INLINE JSValue JIT::getConstantOperand(int src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}
45 | ||
// Stores the int32 in 'from' into the given call-frame header slot.
// On JSVALUE32_64 the tag and payload halves are written separately
// (tag = Int32Tag); on 64-bit a single 64-bit store suffices.
ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    store64(from, addressFor(entry, callFrameRegister));
#endif
}
55 | ||
// Loads a pointer-sized value from call-frame header slot 'entry' of the
// frame pointed to by 'from' into register 'to'.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
}
60 | ||
// Loads the low 32 bits of call-frame header slot 'entry' (frame base in
// 'from') into register 'to'.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
}
65 | ||
#if USE(JSVALUE64)
// 64-bit-only variant: loads the full 64-bit call-frame header slot.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load64(Address(from, entry * sizeof(Register)), to);
}
#endif
72 | ||
// Loads the single character of a one-character JSString (cell pointer in
// 'src') into 'dst'. Appends a jump to 'failures' when: 'src' is not a
// JSString cell, its length is not 1, or its StringImpl pointer is null
// (presumably an unmaterialized/rope string — confirm against JSString).
// Clobbers regT1 (used for the StringImpl flags).
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchStructure(NotEqual, Address(src, JSCell::structureIDOffset()), m_vm->stringStructure.get()));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    failures.append(branchTest32(Zero, dst)); // Null StringImpl => bail to slow path.
    loadPtr(MacroAssembler::Address(dst, StringImpl::flagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, StringImpl::dataOffset()), dst);

    // Branch on the is-8-bit flag and load either one byte or one 16-bit unit.
    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}
91 | ||
// Emits a near call with no argument marshalling or exception check and
// records it in m_calls so link-time patching can bind it to 'function'.
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}
99 | ||
// Publishes the current frame to the VM before calling out to C++:
// encodes the current bytecode location into the ArgumentCount tag slot
// and stores callFrameRegister into vm->topCallFrame.
// NOTE(review): the location is encoded with a +1 bias on the bytecode
// offset/instruction — presumably significant to CallFrame::Location's
// encoding; confirm there before changing.
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
#if USE(JSVALUE32_64)
    // 32-bit encodes the Instruction* rather than a plain offset.
    Instruction* instruction = m_codeBlock->instructions().begin() + m_bytecodeOffset + 1;
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeInstruction(instruction);
#else
    uint32_t locationBits = CallFrame::Location::encodeAsBytecodeOffset(m_bytecodeOffset + 1);
#endif
    store32(TrustedImm32(locationBits), intTagFor(JSStack::ArgumentCount));
    storePtr(callFrameRegister, &m_vm->topCallFrame);
}
112 | ||
// Emits a call to 'function', preceded by a topCallFrame update and
// followed by an exception check that jumps to the handler on throw.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheck(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCall(function);
    exceptionCheck();
    return call;
}
120 | ||
#if OS(WINDOWS) && CPU(X86_64)
// Win64-only variant: the Windows x64 ABI returns large structs (the slow
// path return type) via a hidden pointer, so a special call helper is used.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckAndSlowPathReturnType(const FunctionPtr& function)
{
    updateTopCallFrame();
    MacroAssembler::Call call = appendCallWithSlowPathReturnType(function);
    exceptionCheck();
    return call;
}
#endif
130 | ||
// Like appendCallWithExceptionCheck, but on exception unwinds using the
// call-frame-rollback variant of the check.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithCallFrameRollbackOnException(const FunctionPtr& function)
{
    updateTopCallFrame(); // The callee is responsible for setting topCallFrame to their caller
    MacroAssembler::Call call = appendCall(function);
    exceptionCheckWithCallFrameRollback();
    return call;
}
138 | ||
// Calls 'function' with an exception check, then stores the JSValue it
// returned (in the return-value register(s)) into virtual register 'dst'.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResult(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    // 32-bit: tag in returnValueGPR2, payload in returnValueGPR.
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}
149 | ||
// As appendCallWithExceptionCheckSetJSValueResult, but also records the
// returned value in the current value-profiling site before storing it.
ALWAYS_INLINE MacroAssembler::Call JIT::appendCallWithExceptionCheckSetJSValueResultWithProfile(const FunctionPtr& function, int dst)
{
    MacroAssembler::Call call = appendCallWithExceptionCheck(function);
    emitValueProfilingSite();
#if USE(JSVALUE64)
    emitPutVirtualRegister(dst, returnValueGPR);
#else
    emitStore(dst, returnValueGPR2, returnValueGPR);
#endif
    return call;
}
161 | ||
// callOperation overload family (shape-independent overloads).
// Each overload marshals its arguments plus the ExecState into the
// platform calling convention via setupArguments*WithExecState, then emits
// the call with the appropriate exception-check policy. The operation
// typedef names appear to encode the C signature (e.g. J_JITOperation_EZ:
// returns EncodedJSValue, takes ExecState*, int32_t) — confirm against
// JITOperations.h before relying on this reading.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EO operation, GPRReg arg)
{
    setupArgumentsWithExecState(arg);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_ESt operation, Structure* structure)
{
    setupArgumentsWithExecState(TrustedImmPtr(structure));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(C_JITOperation_EZ operation, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheck(operation);
}

// J_* overloads additionally store the returned JSValue into 'dst'.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_E operation, int dst)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2, TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJcpZ operation, int dst, ArrayAllocationProfile* arg1, const JSValue* arg2, int32_t arg3)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), TrustedImmPtr(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EC operation, int dst, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, JSCell* cell)
{
    setupArgumentsWithExecState(TrustedImmPtr(cell));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EP operation, int dst, void* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

// WithProfileTag overloads also feed the result through value profiling.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(WithProfileTag, J_JITOperation_EPc operation, int dst, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EZ operation, int dst, int32_t arg)
{
    setupArgumentsWithExecState(TrustedImm32(arg));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EOJss operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(Sprt_JITOperation_EZ operation, int32_t op)
{
#if OS(WINDOWS) && CPU(X86_64)
    // Win64 returns the SlowPathReturnType struct indirectly; use the
    // dedicated setup/append helpers.
    setupArgumentsWithExecStateForCallWithSlowPathReturnType(TrustedImm32(op));
    return appendCallWithExceptionCheckAndSlowPathReturnType(operation);
#else
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
#endif
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EC operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECC operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EPc operation, Instruction* bytecodePC)
{
    setupArgumentsWithExecState(TrustedImmPtr(bytecodePC));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EZ operation, int32_t op)
{
    setupArgumentsWithExecState(TrustedImm32(op));
    return appendCallWithExceptionCheck(operation);
}

// Rollback variants unwind the call frame on exception (used, e.g., for
// operations called before the frame is fully established).
ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(J_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}

// No exception check: caller guarantees the operation cannot throw.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(J_JITOperation_EE operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(V_JITOperation_ECb operation, CodeBlock* pointer)
{
    setupArgumentsWithExecState(TrustedImmPtr(pointer));
    return appendCallWithCallFrameRollbackOnException(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperationWithCallFrameRollbackOnException(Z_JITOperation_E operation)
{
    setupArgumentsExecState();
    return appendCallWithCallFrameRollbackOnException(operation);
}
311 | ||
312 | ||
#if USE(JSVALUE64)
// 64-bit callOperation overloads: a JSValue fits in one GPR, so each
// EncodedJSValue argument is passed as a single register.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EJZZ operation, GPRReg arg1, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(arg1, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJJZ operation, GPRReg arg1, GPRReg arg2, GPRReg arg3, int32_t arg4)
{
    setupArgumentsWithExecState(arg1, arg2, arg3, TrustedImm32(arg4));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1, RegisterID regOp2, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1, regOp2, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1, RegisterID regOp2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, regOp2, regOp3);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1, const Identifier* arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1, GPRReg arg2)
{
    setupArgumentsWithExecState(arg1, arg2);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

// No exception check: caller guarantees the operation cannot throw.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1)
{
    setupArgumentsWithExecState(arg1);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1, size_t arg2)
{
    setupArgumentsWithExecState(arg1, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID regOp1, RegisterID regOp2)
{
    setupArgumentsWithExecState(regOp1, regOp2);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EIdJZ operation, const Identifier* identOp1, RegisterID regOp2, int32_t op3)
{
    setupArgumentsWithExecState(TrustedImmPtr(identOp1), regOp2, TrustedImm32(op3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp)
{
    setupArgumentsWithExecState(regOp);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJIdJJ operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1, int32_t op2)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1, int32_t op2, RegisterID regOp3)
{
    setupArgumentsWithExecState(regOp1, TrustedImm32(op2), regOp3);
    return appendCallWithExceptionCheck(operation);
}
#else // USE(JSVALUE32_64)

// 32-bit callOperation overloads: a JSValue is split into tag + payload
// registers, passed payload-first to match the little-endian 64-bit
// EncodedJSValue layout.

// EncodedJSValue in JSVALUE32_64 is a 64-bit integer. When being compiled in ARM EABI, it must be aligned even-numbered register (r0, r2 or [sp]).
// To avoid assemblies from using wrong registers, let's occupy r1 or r3 with a dummy argument when necessary.
#if (COMPILER_SUPPORTS(EABI) && CPU(ARM)) || CPU(MIPS)
#define EABI_32BIT_DUMMY_ARG TrustedImm32(0),
#else
#define EABI_32BIT_DUMMY_ARG
#endif

// JSVALUE32_64 is a 64-bit integer that cannot be put half in an argument register and half on stack when using SH4 architecture.
// To avoid this, let's occupy the 4th argument register (r7) with a dummy argument when necessary. This must only be done when there
// is no other 32-bit value argument behind this 64-bit JSValue.
#if CPU(SH4)
#define SH4_32BIT_DUMMY_ARG TrustedImm32(0),
#else
#define SH4_32BIT_DUMMY_ARG
#endif

// No exception check: caller guarantees the operation cannot throw.
ALWAYS_INLINE MacroAssembler::Call JIT::callOperationNoExceptionCheck(V_JITOperation_EJ operation, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    updateTopCallFrame();
    return appendCall(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EJZZ operation, GPRReg arg1Tag, GPRReg arg1Payload, int32_t arg2, int32_t arg3)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImm32(arg2), TrustedImm32(arg3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(F_JITOperation_EFJJZ operation, GPRReg arg1, GPRReg arg2Tag, GPRReg arg2Payload, GPRReg arg3Tag, GPRReg arg3Payload, int32_t arg4)
{
    setupArgumentsWithExecState(arg1, arg2Payload, arg2Tag, arg3Payload, arg3Tag, TrustedImm32(arg4));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EAapJ operation, int dst, ArrayAllocationProfile* arg1, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(TrustedImmPtr(arg1), arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_ESsiJI operation, int dst, StructureStubInfo* stubInfo, GPRReg arg1Tag, GPRReg arg1Payload, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), arg1Payload, arg1Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJIdc operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, const Identifier* arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResult(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(JIT::WithProfileTag, J_JITOperation_EJJ operation, int dst, GPRReg arg1Tag, GPRReg arg1Payload, GPRReg arg2Tag, GPRReg arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheckSetJSValueResultWithProfile(operation, dst);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(P_JITOperation_EJS operation, GPRReg arg1Tag, GPRReg arg1Payload, size_t arg2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, TrustedImmPtr(arg2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJ operation, RegisterID argTag, RegisterID argPayload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG argPayload, argTag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(S_JITOperation_EJJ operation, RegisterID arg1Tag, RegisterID arg1Payload, RegisterID arg2Tag, RegisterID arg2Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG arg1Payload, arg1Tag, SH4_32BIT_DUMMY_ARG arg2Payload, arg2Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ECICC operation, RegisterID regOp1, const Identifier* identOp2, RegisterID regOp3, RegisterID regOp4)
{
    setupArgumentsWithExecState(regOp1, TrustedImmPtr(identOp2), regOp3, regOp4);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EIdJZ operation, const Identifier* identOp1, RegisterID regOp2Tag, RegisterID regOp2Payload, int32_t op3)
{
    setupArgumentsWithExecState(TrustedImmPtr(identOp1), regOp2Payload, regOp2Tag, TrustedImm32(op3));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, StringImpl* uid)
{
    setupArgumentsWithExecState(TrustedImmPtr(stubInfo), regOp1Payload, regOp1Tag, regOp2Payload, regOp2Tag, TrustedImmPtr(uid));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJJJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, RegisterID regOp2Tag, RegisterID regOp2Payload, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, SH4_32BIT_DUMMY_ARG regOp2Payload, regOp2Tag, regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2));
    return appendCallWithExceptionCheck(operation);
}

ALWAYS_INLINE MacroAssembler::Call JIT::callOperation(V_JITOperation_EJZJ operation, RegisterID regOp1Tag, RegisterID regOp1Payload, int32_t op2, RegisterID regOp3Tag, RegisterID regOp3Payload)
{
    setupArgumentsWithExecState(EABI_32BIT_DUMMY_ARG regOp1Payload, regOp1Tag, TrustedImm32(op2), EABI_32BIT_DUMMY_ARG regOp3Payload, regOp3Tag);
    return appendCallWithExceptionCheck(operation);
}

#undef EABI_32BIT_DUMMY_ARG
#undef SH4_32BIT_DUMMY_ARG

#endif // USE(JSVALUE32_64)
567 | ||
// Emits a branch taken when the cell in 'reg' does NOT have 'structure';
// the returned Jump is typically appended as a slow case.
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchStructure(NotEqual, Address(reg, JSCell::structureIDOffset()), structure);
}
572 | ||
// Links the next slow case only if 'vReg' is not statically known to be a
// cell; when it is known, the not-a-cell check was never emitted, so there
// is no slow-case entry to consume.
ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}
578 | ||
// Registers 'jump' as a slow-path entry for the current bytecode so the
// slow-path generator can link it later.
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}
585 | ||
586 | ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList) | |
587 | { | |
588 | ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set. | |
589 | ||
590 | const JumpList::JumpVector& jumpVector = jumpList.jumps(); | |
591 | size_t size = jumpVector.size(); | |
592 | for (size_t i = 0; i < size; ++i) | |
593 | m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset)); | |
594 | } | |
595 | ||
// Registers an unlinked (empty) slow-case entry for the current bytecode.
// NOTE(review): presumably used as a placeholder consumed by linkSlowCase
// without an actual jump — confirm at the call sites.
ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}
603 | ||
// Queues 'jump' for linking to the bytecode at (current offset +
// relativeOffset); actual linking happens when all labels are known.
ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}
610 | ||
// Links 'jump' (emitted on the slow path) directly back to the hot-path
// label of the bytecode at (current offset + relativeOffset).
ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}
617 | ||
// Emits a branch taken when the cell in 'cellReg' is not an object,
// i.e. its JSType is below ObjectType in the type ordering.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfCellNotObject(RegisterID cellReg)
{
    return branch8(Below, Address(cellReg, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}
622 | ||
#if ENABLE(SAMPLING_FLAGS)
// Sets sampling flag 'flag' (1-based, 1..32) by OR-ing its bit into the
// global flag word.
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

// Clears sampling flag 'flag' (1-based, 1..32) by AND-ing out its bit.
ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif
638 | ||
#if ENABLE(SAMPLING_COUNTERS)
// Emits a 64-bit add of 'count' to the given sampling counter's slot.
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
#endif
645 | ||
646 | #if ENABLE(OPCODE_SAMPLING) | |
647 | #if CPU(X86_64) | |
// Emits code recording the current instruction into the sampler's sample slot.
// X86_64 variant: materializes the slot address in ecx first, since a 64-bit
// absolute-address store needs the address in a register.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
653 | #else | |
// Non-X86_64 variant: the macroassembler can store straight to an absolute
// address, so no scratch register is needed.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
658 | #endif | |
659 | #endif | |
660 | ||
661 | #if ENABLE(CODEBLOCK_SAMPLING) | |
662 | #if CPU(X86_64) | |
// Emits code recording the currently executing CodeBlock into the sampler's
// code-block slot. X86_64 variant routes the slot address through ecx (64-bit
// absolute stores need the address in a register).
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
668 | #else | |
// Non-X86_64 variant: direct store to the sampler's code-block slot.
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
673 | #endif | |
674 | #endif | |
675 | ||
676 | ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(int src) | |
677 | { | |
678 | return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1; | |
679 | } | |
680 | ||
// Emits an inline fast-path allocation of a JSObject from the given allocator's
// free list, leaving the new object in `result`. Takes the slow case (addSlowCase)
// when the free list is empty. `scratch` is clobbered.
template<typename StructureType>
inline void JIT::emitAllocateJSObject(RegisterID allocator, StructureType structure, RegisterID result, RegisterID scratch)
{
    loadPtr(Address(allocator, MarkedAllocator::offsetOfFreeListHead()), result);
    addSlowCase(branchTestPtr(Zero, result)); // null head => free list exhausted, go to slow path

    // remove the object from the free list
    loadPtr(Address(result), scratch);
    storePtr(scratch, Address(allocator, MarkedAllocator::offsetOfFreeListHead()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));

    // initialize the object's structure
    emitStoreStructureWithTypeInfo(structure, result, scratch);
}
697 | ||
// Emits a store of the current result value (regT0, plus regT1 tag on 32-bit)
// into the given ValueProfile's single bucket. Caller must already have checked
// shouldEmitProfiling().
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif

    // We're in a simple configuration: only one bucket, so we can just do a direct
    // store.
#if USE(JSVALUE64)
    store64(value, valueProfile->m_buckets);
#else
    // On 32-bit, the encoded value is written as separate payload/tag words.
    EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
    store32(value, &descriptor->asBits.payload);
    store32(valueTag, &descriptor->asBits.tag);
#endif
}
718 | ||
// Profiles the current result value for the given bytecode offset.
// No-op when profiling is disabled.
inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}
725 | ||
// Convenience overload: profiles at the bytecode offset currently being compiled.
inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}
730 | ||
// Records the cell's structure ID into the array profile (when profiling), then
// unconditionally loads the cell's indexing type byte into `indexingType` for
// the caller's subsequent shape checks. `indexingType` is clobbered either way.
inline void JIT::emitArrayProfilingSiteWithCell(RegisterID cell, RegisterID indexingType, ArrayProfile* arrayProfile)
{
    if (shouldEmitProfiling()) {
        // indexingType doubles as a scratch register for the structure ID here.
        load32(MacroAssembler::Address(cell, JSCell::structureIDOffset()), indexingType);
        store32(indexingType, arrayProfile->addressOfLastSeenStructureID());
    }

    load8(Address(cell, JSCell::indexingTypeOffset()), indexingType);
}
740 | ||
// Like emitArrayProfilingSiteWithCell(), but looks up (or creates) the
// ArrayProfile for the given bytecode index.
inline void JIT::emitArrayProfilingSiteForBytecodeIndexWithCell(RegisterID cell, RegisterID indexingType, unsigned bytecodeIndex)
{
    emitArrayProfilingSiteWithCell(cell, indexingType, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
}
745 | ||
// Emits code flagging in the profile that a store-to-hole occurred.
inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
}
750 | ||
// Emits code flagging in the profile that an out-of-bounds access occurred.
inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
}
755 | ||
// Readability wrapper: did the observed array modes include the given shape capability?
static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
    return arrayModesInclude(arrayModes, capability);
}
760 | ||
// Picks the JIT array access mode from the profile's observed shapes.
// The checks are in priority order (Double, Int32, ArrayStorage, else
// Contiguous) — do not reorder them.
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
    // Take the code block's lock: the profile may be updated concurrently.
    ConcurrentJITLocker locker(m_codeBlock->m_lock);
    profile->computeUpdatedPrediction(locker, m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes(locker);
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
}
774 | ||
775 | #if USE(JSVALUE32_64) | |
776 | ||
// Loads the tag word of virtual register `index` into `tag`.
// Constants are materialized as an immediate instead of a memory load.
inline void JIT::emitLoadTag(int index, RegisterID tag)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        return;
    }

    load32(tagFor(index), tag);
}
786 | ||
// Loads the payload word of virtual register `index` into `payload`.
// Constants are materialized as an immediate instead of a memory load.
inline void JIT::emitLoadPayload(int index, RegisterID payload)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        return;
    }

    load32(payloadFor(index), payload);
}
796 | ||
// Materializes a known JSValue into a tag/payload register pair.
inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}
802 | ||
// Loads virtual register `index` (relative to `base`) into a tag/payload pair.
// The load order is chosen so that a destination register aliasing `base` is
// only overwritten by the final load.
inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        // callFrameRegister is never a destination here, so the helper paths
        // (which also handle constants) are safe.
        RELEASE_ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}
823 | ||
// Loads two virtual registers into two tag/payload pairs.
// NOTE(review): index2 is loaded first — presumably so index1's result survives
// if the destination pairs alias; keep the order as-is.
inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
{
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}
829 | ||
// Loads virtual register `index` as a double into an FP register.
// Constants are read directly out of the code block's constant pool slot.
inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}
838 | ||
// Converts the int32 in virtual register `index` to a double in `value`.
// For constants, reads the payload word of the boxed value in the constant
// pool via an explicit byte offset.
inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}
848 | ||
// Stores a tag/payload register pair into virtual register `index` (relative to `base`).
inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}
854 | ||
// Stores an int32 payload; rewrites the tag only when the slot is not already
// known to hold an Int32 (indexIsInt32 lets callers skip the redundant store).
inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
861 | ||
// Immediate-payload overload of emitStoreInt32(); same tag-skipping contract.
inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
868 | ||
// Stores a cell payload; writes the Cell tag only when the slot is not already
// known to hold a cell.
inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}
875 | ||
// Stores a boolean payload; writes the Boolean tag only when the slot is not
// already known to hold a boolean.
inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}
882 | ||
// Stores an FP register as the full 64-bit double value of virtual register `index`.
inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}
887 | ||
// Stores a compile-time-known JSValue into virtual register `index` as
// immediate payload/tag writes.
inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}
893 | ||
// Initializes virtual register `dst` to undefined (used for local variable setup).
ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    emitStore(dst, jsUndefined());
}
898 | ||
// Adds a slow case unless the operand is statically known to be a cell.
// A constant operand that reaches here is known not to be a cell, so it gets
// an unconditional jump to the slow path.
inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}
908 | ||
// Variant taking the operand's tag already loaded in a register: compares it
// against CellTag directly instead of reloading from the stack.
inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}
918 | ||
// Returns true when operand `src` is a constant register holding an int32.
ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}
923 | ||
924 | ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(int op1, int op2, int& op, int32_t& constant) | |
925 | { | |
926 | if (isOperandConstantImmediateInt(op1)) { | |
927 | constant = getConstantOperand(op1).asInt32(); | |
928 | op = op2; | |
929 | return true; | |
930 | } | |
931 | ||
932 | if (isOperandConstantImmediateInt(op2)) { | |
933 | constant = getConstantOperand(op2).asInt32(); | |
934 | op = op1; | |
935 | return true; | |
936 | } | |
937 | ||
938 | return false; | |
939 | } | |
940 | ||
941 | #else // USE(JSVALUE32_64) | |
942 | ||
// Loads virtual register `src` from the call frame into hardware register `dst`.
// Constants are materialized as immediates; number constants go through Imm64
// (rather than TrustedImm64) — NOTE(review): presumably so they participate in
// immediate blinding; confirm against the MacroAssembler's Imm64 semantics.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
}
960 | ||
// VirtualRegister-typed convenience overload; forwards the raw offset.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(VirtualRegister src, RegisterID dst)
{
    emitGetVirtualRegister(src.offset(), dst);
}
965 | ||
// Loads two virtual registers into two hardware registers, src1 first.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    emitGetVirtualRegister(src1, dst1);
    emitGetVirtualRegister(src2, dst2);
}
971 | ||
// VirtualRegister-typed convenience overload; forwards the raw offsets.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(VirtualRegister src1, RegisterID dst1, VirtualRegister src2, RegisterID dst2)
{
    emitGetVirtualRegisters(src1.offset(), dst1, src2.offset(), dst2);
}
976 | ||
// Returns the int32 value of constant operand `src`.
// Caller must ensure isOperandConstantImmediateInt(src) holds.
ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(int src)
{
    return getConstantOperand(src).asInt32();
}
981 | ||
// Returns true when operand `src` is a constant register holding an int32.
ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(int src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}
986 | ||
// Stores hardware register `from` into virtual register `dst` on the call frame.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(int dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
}
991 | ||
// VirtualRegister-typed convenience overload; forwards the raw offset.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(VirtualRegister dst, RegisterID from)
{
    emitPutVirtualRegister(dst.offset(), from);
}
996 | ||
// Initializes virtual register `dst` to the encoded undefined value.
ALWAYS_INLINE void JIT::emitInitRegister(int dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
1001 | ||
// Branch taken when `reg` holds a cell: in the 64-bit value encoding a cell
// pointer has none of the tag-mask bits set, so (reg & tagMask) == 0.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}
1006 | ||
// Branch taken when both registers hold cells: the OR of two values has no
// tag bits set iff neither value had any. Clobbers `scratch`.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}
1013 | ||
// Routes to the slow path when `reg` holds a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}
1018 | ||
// Routes to the slow path when `reg` does not hold a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}
1023 | ||
// Like the single-argument form, but skips the runtime check entirely when
// static analysis already proved virtual register `vReg` holds a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}
1029 | ||
// Loads virtual register `index` as a double into an FP register.
// Constants are read directly out of the code block's constant pool slot.
inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(TrustedImmPtr(&inConstantPool), value);
    } else
        loadDouble(addressFor(index), value);
}
1038 | ||
// Converts the int32 in virtual register `index` to a double in `value`.
// Constant operands must be int32 constants and are converted from an immediate.
inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
1047 | ||
// Branch taken when `reg` holds an immediate int32: in the 64-bit encoding an
// int32 carries the full TagTypeNumber pattern in its high bits, so its
// unsigned value is >= tagTypeNumberRegister's contents.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}
1052 | ||
// Inverse of emitJumpIfImmediateInteger(): branch taken when `reg` is not an
// immediate int32.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}
1057 | ||
// Branch taken unless both registers hold immediate int32s: the AND of two
// values keeps the full TagTypeNumber pattern iff both values had it.
// Clobbers `scratch`.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}
1064 | ||
// Routes to the slow path when `reg` is not an immediate int32.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}
1069 | ||
// Routes to the slow path unless both registers hold immediate int32s.
// Clobbers `scratch`.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}
1074 | ||
// Routes to the slow path when `reg` is not an immediate number (int32 or double).
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}
1079 | ||
// Re-tags a raw int32 in `src` as an encoded immediate JSValue in `dest`.
// Alias of emitFastArithIntToImmNoCheck(); no overflow/type checks are emitted.
ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}
1084 | ||
// Converts a raw 0/1 in `reg` into the encoded boolean JSValue by OR-ing in
// the ValueFalse bit pattern (0 -> false, 1 -> true).
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}
1089 | ||
1090 | #endif // USE(JSVALUE32_64) | |
1091 | ||
// Branch on whether `leftHandSide` matches the given Structure. On 64-bit,
// structures are identified by a 32-bit structure ID; on 32-bit, by pointer.
template <typename T>
JIT::Jump JIT::branchStructure(RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return branch32(condition, leftHandSide, TrustedImm32(structure->id()));
#else
    return branchPtr(condition, leftHandSide, TrustedImmPtr(structure));
#endif
}
1101 | ||
// Free-function form of JIT::branchStructure for use with an arbitrary
// MacroAssembler (e.g. from stub/IC generators). Same encoding split:
// 32-bit structure ID on 64-bit builds, raw pointer otherwise.
template <typename T>
MacroAssembler::Jump branchStructure(MacroAssembler& jit, MacroAssembler::RelationalCondition condition, T leftHandSide, Structure* structure)
{
#if USE(JSVALUE64)
    return jit.branch32(condition, leftHandSide, MacroAssembler::TrustedImm32(structure->id()));
#else
    return jit.branchPtr(condition, leftHandSide, MacroAssembler::TrustedImmPtr(structure));
#endif
}
1111 | ||
1112 | } // namespace JSC | |
1113 | ||
1114 | #endif // ENABLE(JIT) | |
1115 | ||
1116 | #endif // JITInlines_h | |
1117 |