/*
 * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef AssemblyHelpers_h
#define AssemblyHelpers_h

#if ENABLE(JIT)

#include "CodeBlock.h"
#include "FPRInfo.h"
#include "GPRInfo.h"
#include "JITCode.h"
#include "MacroAssembler.h"
#include "TypeofType.h"
#include "VM.h"

namespace JSC {

typedef void (*V_DebugOperation_EPP)(ExecState*, void*, void*);

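// AssemblyHelpers extends MacroAssembler with helpers for JSC's runtime
// conventions: call frame header access, JSValue boxing and unboxing, and the
// type-testing branches shared by the JIT tiers.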
class AssemblyHelpers : public MacroAssembler {
public:
    AssemblyHelpers(VM* vm, CodeBlock* codeBlock)
        : m_vm(vm)
        , m_codeBlock(codeBlock)
        , m_baselineCodeBlock(codeBlock ? codeBlock->baselineAlternative() : 0)
    {
        if (m_codeBlock) {
            ASSERT(m_baselineCodeBlock);
            ASSERT(!m_baselineCodeBlock->alternative());
            ASSERT(m_baselineCodeBlock->jitType() == JITCode::None || JITCode::isBaselineCode(m_baselineCodeBlock->jitType()));
        }
    }

    CodeBlock* codeBlock() { return m_codeBlock; }
    VM* vm() { return m_vm; }
    AssemblerType_T& assembler() { return m_assembler; }

    void checkStackPointerAlignment()
    {
        // This check is both unneeded and harder to write correctly for ARM64
#if !defined(NDEBUG) && !CPU(ARM64)
        Jump stackPointerAligned = branchTestPtr(Zero, stackPointerRegister, TrustedImm32(0xf));
        abortWithReason(AHStackPointerMisaligned);
        stackPointerAligned.link(this);
#endif
    }

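    // On 64-bit targets (USE(JSVALUE64)) a JSValue occupies a single GPR, so
    // cells and full values are written with one 64-bit store. On 32-bit
    // targets a JSValue is a tag/payload pair, so each store is split into two
    // 32-bit stores at TagOffset and PayloadOffset.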
    template<typename T>
    void storeCell(T cell, Address address)
    {
#if USE(JSVALUE64)
        store64(cell, address);
#else
        store32(cell, address.withOffset(PayloadOffset));
        store32(TrustedImm32(JSValue::CellTag), address.withOffset(TagOffset));
#endif
    }

    void storeValue(JSValueRegs regs, Address address)
    {
#if USE(JSVALUE64)
        store64(regs.gpr(), address);
#else
        store32(regs.payloadGPR(), address.withOffset(PayloadOffset));
        store32(regs.tagGPR(), address.withOffset(TagOffset));
#endif
    }

    void storeValue(JSValueRegs regs, BaseIndex address)
    {
#if USE(JSVALUE64)
        store64(regs.gpr(), address);
#else
        store32(regs.payloadGPR(), address.withOffset(PayloadOffset));
        store32(regs.tagGPR(), address.withOffset(TagOffset));
#endif
    }

    void storeValue(JSValueRegs regs, void* address)
    {
#if USE(JSVALUE64)
        store64(regs.gpr(), address);
#else
        store32(regs.payloadGPR(), bitwise_cast<void*>(bitwise_cast<uintptr_t>(address) + PayloadOffset));
        store32(regs.tagGPR(), bitwise_cast<void*>(bitwise_cast<uintptr_t>(address) + TagOffset));
#endif
    }

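    // The 32-bit paths below order their loads to survive register aliasing:
    // if the address's base (or index) register is also the destination
    // payload register, the tag is loaded first so the clobbering payload load
    // happens last.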
    void loadValue(Address address, JSValueRegs regs)
    {
#if USE(JSVALUE64)
        load64(address, regs.gpr());
#else
        if (address.base == regs.payloadGPR()) {
            load32(address.withOffset(TagOffset), regs.tagGPR());
            load32(address.withOffset(PayloadOffset), regs.payloadGPR());
        } else {
            load32(address.withOffset(PayloadOffset), regs.payloadGPR());
            load32(address.withOffset(TagOffset), regs.tagGPR());
        }
#endif
    }

    void loadValue(BaseIndex address, JSValueRegs regs)
    {
#if USE(JSVALUE64)
        load64(address, regs.gpr());
#else
        if (address.base == regs.payloadGPR() || address.index == regs.payloadGPR()) {
            // We actually could handle the case where the registers are aliased to both
            // tag and payload, but we don't for now.
            RELEASE_ASSERT(address.base != regs.tagGPR());
            RELEASE_ASSERT(address.index != regs.tagGPR());

            load32(address.withOffset(TagOffset), regs.tagGPR());
            load32(address.withOffset(PayloadOffset), regs.payloadGPR());
        } else {
            load32(address.withOffset(PayloadOffset), regs.payloadGPR());
            load32(address.withOffset(TagOffset), regs.tagGPR());
        }
#endif
    }

    void moveTrustedValue(JSValue value, JSValueRegs regs)
    {
#if USE(JSVALUE64)
        move(TrustedImm64(JSValue::encode(value)), regs.gpr());
#else
        move(TrustedImm32(value.tag()), regs.tagGPR());
        move(TrustedImm32(value.payload()), regs.payloadGPR());
#endif
    }

    void storeTrustedValue(JSValue value, Address address)
    {
#if USE(JSVALUE64)
        store64(TrustedImm64(JSValue::encode(value)), address);
#else
        store32(TrustedImm32(value.tag()), address.withOffset(TagOffset));
        store32(TrustedImm32(value.payload()), address.withOffset(PayloadOffset));
#endif
    }

    void storeTrustedValue(JSValue value, BaseIndex address)
    {
#if USE(JSVALUE64)
        store64(TrustedImm64(JSValue::encode(value)), address);
#else
        store32(TrustedImm32(value.tag()), address.withOffset(TagOffset));
        store32(TrustedImm32(value.payload()), address.withOffset(PayloadOffset));
#endif
    }

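    // Per-architecture prologue and epilogue helpers. prologueStackPointerDelta()
    // reports how much the prologue pushes onto the stack (the saved frame
    // pointer, plus the return address on architectures where the callee saves
    // the link register); the *BeforePrologue helpers further down rely on it.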
#if CPU(X86_64) || CPU(X86)
    static size_t prologueStackPointerDelta()
    {
        // Prologue only saves the framePointerRegister
        return sizeof(void*);
    }

    void emitFunctionPrologue()
    {
        push(framePointerRegister);
        move(stackPointerRegister, framePointerRegister);
    }

    void emitFunctionEpilogue()
    {
        move(framePointerRegister, stackPointerRegister);
        pop(framePointerRegister);
    }

    void preserveReturnAddressAfterCall(GPRReg reg)
    {
        pop(reg);
    }

    void restoreReturnAddressBeforeReturn(GPRReg reg)
    {
        push(reg);
    }

    void restoreReturnAddressBeforeReturn(Address address)
    {
        push(address);
    }
#endif // CPU(X86_64) || CPU(X86)

#if CPU(ARM) || CPU(ARM64)
    static size_t prologueStackPointerDelta()
    {
        // Prologue saves the framePointerRegister and linkRegister
        return 2 * sizeof(void*);
    }

    void emitFunctionPrologue()
    {
        pushPair(framePointerRegister, linkRegister);
        move(stackPointerRegister, framePointerRegister);
    }

    void emitFunctionEpilogue()
    {
        move(framePointerRegister, stackPointerRegister);
        popPair(framePointerRegister, linkRegister);
    }

    ALWAYS_INLINE void preserveReturnAddressAfterCall(RegisterID reg)
    {
        move(linkRegister, reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(RegisterID reg)
    {
        move(reg, linkRegister);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(Address address)
    {
        loadPtr(address, linkRegister);
    }
#endif

#if CPU(MIPS)
    static size_t prologueStackPointerDelta()
    {
        // Prologue saves the framePointerRegister and returnAddressRegister
        return 2 * sizeof(void*);
    }

    ALWAYS_INLINE void preserveReturnAddressAfterCall(RegisterID reg)
    {
        move(returnAddressRegister, reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(RegisterID reg)
    {
        move(reg, returnAddressRegister);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(Address address)
    {
        loadPtr(address, returnAddressRegister);
    }
#endif

#if CPU(SH4)
    static size_t prologueStackPointerDelta()
    {
        // Prologue saves the framePointerRegister and link register
        return 2 * sizeof(void*);
    }

    void emitFunctionPrologue()
    {
        push(linkRegister);
        push(framePointerRegister);
        move(stackPointerRegister, framePointerRegister);
    }

    void emitFunctionEpilogue()
    {
        move(framePointerRegister, stackPointerRegister);
        pop(framePointerRegister);
        pop(linkRegister);
    }

    ALWAYS_INLINE void preserveReturnAddressAfterCall(RegisterID reg)
    {
        m_assembler.stspr(reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(RegisterID reg)
    {
        m_assembler.ldspr(reg);
    }

    ALWAYS_INLINE void restoreReturnAddressBeforeReturn(Address address)
    {
        loadPtrLinkReg(address);
    }
#endif

    void emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, GPRReg to, GPRReg from = GPRInfo::callFrameRegister)
    {
        loadPtr(Address(from, entry * sizeof(Register)), to);
    }
    void emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry entry, GPRReg to, GPRReg from = GPRInfo::callFrameRegister)
    {
        load32(Address(from, entry * sizeof(Register)), to);
    }
#if USE(JSVALUE64)
    void emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry entry, GPRReg to, GPRReg from = GPRInfo::callFrameRegister)
    {
        load64(Address(from, entry * sizeof(Register)), to);
    }
#endif // USE(JSVALUE64)
    void emitPutToCallFrameHeader(GPRReg from, JSStack::CallFrameHeaderEntry entry)
    {
        storePtr(from, Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
    }

    void emitPutImmediateToCallFrameHeader(void* value, JSStack::CallFrameHeaderEntry entry)
    {
        storePtr(TrustedImmPtr(value), Address(GPRInfo::callFrameRegister, entry * sizeof(Register)));
    }

    void emitGetCallerFrameFromCallFrameHeaderPtr(RegisterID to)
    {
        loadPtr(Address(GPRInfo::callFrameRegister, CallFrame::callerFrameOffset()), to);
    }
    void emitPutCallerFrameToCallFrameHeader(RegisterID from)
    {
        storePtr(from, Address(GPRInfo::callFrameRegister, CallFrame::callerFrameOffset()));
    }

    void emitPutReturnPCToCallFrameHeader(RegisterID from)
    {
        storePtr(from, Address(GPRInfo::callFrameRegister, CallFrame::returnPCOffset()));
    }
    void emitPutReturnPCToCallFrameHeader(TrustedImmPtr from)
    {
        storePtr(from, Address(GPRInfo::callFrameRegister, CallFrame::returnPCOffset()));
    }

    // emitPutToCallFrameHeaderBeforePrologue() and related helpers access callee
    // frame header fields before the code from emitFunctionPrologue() has
    // executed. First, the access is via the stack pointer rather than the frame
    // pointer. Second, the address calculation must account for the stack
    // pointer not yet having been adjusted down for the return PC and/or the
    // caller's frame pointer. On some platforms, the callee is responsible for
    // pushing the "link register" containing the return address in the function
    // prologue.
#if USE(JSVALUE64)
    void emitPutToCallFrameHeaderBeforePrologue(GPRReg from, JSStack::CallFrameHeaderEntry entry)
    {
        storePtr(from, Address(stackPointerRegister, entry * static_cast<ptrdiff_t>(sizeof(Register)) - prologueStackPointerDelta()));
    }
#else
    void emitPutPayloadToCallFrameHeaderBeforePrologue(GPRReg from, JSStack::CallFrameHeaderEntry entry)
    {
        storePtr(from, Address(stackPointerRegister, entry * static_cast<ptrdiff_t>(sizeof(Register)) - prologueStackPointerDelta() + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
    }

    void emitPutTagToCallFrameHeaderBeforePrologue(TrustedImm32 tag, JSStack::CallFrameHeaderEntry entry)
    {
        storePtr(tag, Address(stackPointerRegister, entry * static_cast<ptrdiff_t>(sizeof(Register)) - prologueStackPointerDelta() + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

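    // Branches on full JSValue equality. On 64-bit this is a single compare
    // against the encoded constant; on 32-bit the tag and payload are compared
    // separately, and values whose payload carries no information (empty, null,
    // undefined) are matched on the tag alone.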
    JumpList branchIfNotEqual(JSValueRegs regs, JSValue value)
    {
#if USE(JSVALUE64)
        return branch64(NotEqual, regs.gpr(), TrustedImm64(JSValue::encode(value)));
#else
        JumpList result;
        result.append(branch32(NotEqual, regs.tagGPR(), TrustedImm32(value.tag())));
        if (value.isEmpty() || value.isUndefinedOrNull())
            return result; // These don't have anything interesting in the payload.
        result.append(branch32(NotEqual, regs.payloadGPR(), TrustedImm32(value.payload())));
        return result;
#endif
    }

    Jump branchIfEqual(JSValueRegs regs, JSValue value)
    {
#if USE(JSVALUE64)
        return branch64(Equal, regs.gpr(), TrustedImm64(JSValue::encode(value)));
#else
        Jump notEqual;
        // These don't have anything interesting in the payload.
        if (!value.isEmpty() && !value.isUndefinedOrNull())
            notEqual = branch32(NotEqual, regs.payloadGPR(), TrustedImm32(value.payload()));
        Jump result = branch32(Equal, regs.tagGPR(), TrustedImm32(value.tag()));
        if (notEqual.isSet())
            notEqual.link(this);
        return result;
#endif
    }

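    // On 64-bit, a value is a cell exactly when none of the bits in
    // tagMaskRegister are set; on 32-bit, cells are identified by CellTag.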
    Jump branchIfNotCell(GPRReg reg)
    {
#if USE(JSVALUE64)
        return branchTest64(MacroAssembler::NonZero, reg, GPRInfo::tagMaskRegister);
#else
        return branch32(MacroAssembler::NotEqual, reg, TrustedImm32(JSValue::CellTag));
#endif
    }
    Jump branchIfNotCell(JSValueRegs regs)
    {
#if USE(JSVALUE64)
        return branchIfNotCell(regs.gpr());
#else
        return branchIfNotCell(regs.tagGPR());
#endif
    }

    Jump branchIfCell(GPRReg reg)
    {
#if USE(JSVALUE64)
        return branchTest64(MacroAssembler::Zero, reg, GPRInfo::tagMaskRegister);
#else
        return branch32(MacroAssembler::Equal, reg, TrustedImm32(JSValue::CellTag));
#endif
    }
    Jump branchIfCell(JSValueRegs regs)
    {
#if USE(JSVALUE64)
        return branchIfCell(regs.gpr());
#else
        return branchIfCell(regs.tagGPR());
#endif
    }

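    // "Other" means null or undefined. On 64-bit, undefined is encoded as null
    // with TagBitUndefined also set, so masking that bit out and comparing
    // against ValueNull matches both. On 32-bit, NullTag and UndefinedTag
    // differ only in their low bit, so OR-ing in 1 folds them together.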
    Jump branchIfOther(JSValueRegs regs, GPRReg tempGPR)
    {
#if USE(JSVALUE64)
        move(regs.gpr(), tempGPR);
        and64(TrustedImm32(~TagBitUndefined), tempGPR);
        return branch64(Equal, tempGPR, TrustedImm64(ValueNull));
#else
        or32(TrustedImm32(1), regs.tagGPR(), tempGPR);
        return branch32(Equal, tempGPR, TrustedImm32(JSValue::NullTag));
#endif
    }

    Jump branchIfNotOther(JSValueRegs regs, GPRReg tempGPR)
    {
#if USE(JSVALUE64)
        move(regs.gpr(), tempGPR);
        and64(TrustedImm32(~TagBitUndefined), tempGPR);
        return branch64(NotEqual, tempGPR, TrustedImm64(ValueNull));
#else
        or32(TrustedImm32(1), regs.tagGPR(), tempGPR);
        return branch32(NotEqual, tempGPR, TrustedImm32(JSValue::NullTag));
#endif
    }

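    // Int32 and number tests under the 64-bit encoding: boxed int32s are the
    // only values at or above TagTypeNumber, and any value with a TagTypeNumber
    // bit set is a number (int32 or offset double). The 32-bit path adds 1 to
    // the tag so that Int32Tag wraps to zero, folding the int32 and double tags
    // into a single unsigned compare against LowestTag + 1.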
    Jump branchIfInt32(JSValueRegs regs)
    {
#if USE(JSVALUE64)
        return branch64(AboveOrEqual, regs.gpr(), GPRInfo::tagTypeNumberRegister);
#else
        return branch32(Equal, regs.tagGPR(), TrustedImm32(JSValue::Int32Tag));
#endif
    }

    Jump branchIfNotInt32(JSValueRegs regs)
    {
#if USE(JSVALUE64)
        return branch64(Below, regs.gpr(), GPRInfo::tagTypeNumberRegister);
#else
        return branch32(NotEqual, regs.tagGPR(), TrustedImm32(JSValue::Int32Tag));
#endif
    }

    // Note that the tempGPR is not used in 64-bit mode.
    Jump branchIfNumber(JSValueRegs regs, GPRReg tempGPR)
    {
#if USE(JSVALUE64)
        UNUSED_PARAM(tempGPR);
        return branchTest64(NonZero, regs.gpr(), GPRInfo::tagTypeNumberRegister);
#else
        add32(TrustedImm32(1), regs.tagGPR(), tempGPR);
        return branch32(Below, tempGPR, TrustedImm32(JSValue::LowestTag + 1));
#endif
    }

    // Note that the tempGPR is not used in 64-bit mode.
    Jump branchIfNotNumber(JSValueRegs regs, GPRReg tempGPR)
    {
#if USE(JSVALUE64)
        UNUSED_PARAM(tempGPR);
        return branchTest64(Zero, regs.gpr(), GPRInfo::tagTypeNumberRegister);
#else
        add32(TrustedImm32(1), regs.tagGPR(), tempGPR);
        return branch32(AboveOrEqual, tempGPR, TrustedImm32(JSValue::LowestTag + 1));
#endif
    }

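    // On 64-bit, true and false are encoded as ValueTrue and ValueFalse, which
    // differ only in the low bit; XOR-ing with ValueFalse therefore leaves 0 or
    // 1 for a boolean and something larger for everything else.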
    // Note that the tempGPR is not used in 32-bit mode.
    Jump branchIfBoolean(JSValueRegs regs, GPRReg tempGPR)
    {
#if USE(JSVALUE64)
        move(regs.gpr(), tempGPR);
        xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), tempGPR);
        return branchTest64(Zero, tempGPR, TrustedImm32(static_cast<int32_t>(~1)));
#else
        UNUSED_PARAM(tempGPR);
        return branch32(Equal, regs.tagGPR(), TrustedImm32(JSValue::BooleanTag));
#endif
    }

    // Note that the tempGPR is not used in 32-bit mode.
    Jump branchIfNotBoolean(JSValueRegs regs, GPRReg tempGPR)
    {
#if USE(JSVALUE64)
        move(regs.gpr(), tempGPR);
        xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), tempGPR);
        return branchTest64(NonZero, tempGPR, TrustedImm32(static_cast<int32_t>(~1)));
#else
        UNUSED_PARAM(tempGPR);
        return branch32(NotEqual, regs.tagGPR(), TrustedImm32(JSValue::BooleanTag));
#endif
    }

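    // Cell type tests read the JSType byte in the cell header. The object
    // checks rely on every object type being numbered at or above ObjectType
    // in the JSType enumeration.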
    Jump branchIfObject(GPRReg cellGPR)
    {
        return branch8(
            AboveOrEqual, Address(cellGPR, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    }

    Jump branchIfNotObject(GPRReg cellGPR)
    {
        return branch8(
            Below, Address(cellGPR, JSCell::typeInfoTypeOffset()), TrustedImm32(ObjectType));
    }

    Jump branchIfType(GPRReg cellGPR, JSType type)
    {
        return branch8(Equal, Address(cellGPR, JSCell::typeInfoTypeOffset()), TrustedImm32(type));
    }

    Jump branchIfNotType(GPRReg cellGPR, JSType type)
    {
        return branch8(NotEqual, Address(cellGPR, JSCell::typeInfoTypeOffset()), TrustedImm32(type));
    }

    Jump branchIfString(GPRReg cellGPR) { return branchIfType(cellGPR, StringType); }
    Jump branchIfNotString(GPRReg cellGPR) { return branchIfNotType(cellGPR, StringType); }
    Jump branchIfSymbol(GPRReg cellGPR) { return branchIfType(cellGPR, SymbolType); }
    Jump branchIfNotSymbol(GPRReg cellGPR) { return branchIfNotType(cellGPR, SymbolType); }
    Jump branchIfFunction(GPRReg cellGPR) { return branchIfType(cellGPR, JSFunctionType); }
    Jump branchIfNotFunction(GPRReg cellGPR) { return branchIfNotType(cellGPR, JSFunctionType); }

    Jump branchIfEmpty(JSValueRegs regs)
    {
#if USE(JSVALUE64)
        return branchTest64(Zero, regs.gpr());
#else
        return branch32(Equal, regs.tagGPR(), TrustedImm32(JSValue::EmptyValueTag));
#endif
    }

    static Address addressForByteOffset(ptrdiff_t byteOffset)
    {
        return Address(GPRInfo::callFrameRegister, byteOffset);
    }
    static Address addressFor(VirtualRegister virtualRegister, GPRReg baseReg)
    {
        ASSERT(virtualRegister.isValid());
        return Address(baseReg, virtualRegister.offset() * sizeof(Register));
    }
    static Address addressFor(VirtualRegister virtualRegister)
    {
        // NB. It's tempting on some architectures to sometimes use an offset from the stack
        // register because for some offsets that will encode to a smaller instruction. But we
        // cannot do this. We use this in places where the stack pointer has been moved to some
        // unpredictable location.
        ASSERT(virtualRegister.isValid());
        return Address(GPRInfo::callFrameRegister, virtualRegister.offset() * sizeof(Register));
    }
    static Address addressFor(int operand)
    {
        return addressFor(static_cast<VirtualRegister>(operand));
    }

    static Address tagFor(VirtualRegister virtualRegister)
    {
        ASSERT(virtualRegister.isValid());
        return Address(GPRInfo::callFrameRegister, virtualRegister.offset() * sizeof(Register) + TagOffset);
    }
    static Address tagFor(int operand)
    {
        return tagFor(static_cast<VirtualRegister>(operand));
    }

    static Address payloadFor(VirtualRegister virtualRegister)
    {
        ASSERT(virtualRegister.isValid());
        return Address(GPRInfo::callFrameRegister, virtualRegister.offset() * sizeof(Register) + PayloadOffset);
    }
    static Address payloadFor(int operand)
    {
        return payloadFor(static_cast<VirtualRegister>(operand));
    }

    // Access to our fixed callee CallFrame.
    static Address calleeFrameSlot(int slot)
    {
        ASSERT(slot >= JSStack::CallerFrameAndPCSize);
        return Address(stackPointerRegister, sizeof(Register) * (slot - JSStack::CallerFrameAndPCSize));
    }

    // Access to our fixed callee CallFrame.
    static Address calleeArgumentSlot(int argument)
    {
        return calleeFrameSlot(virtualRegisterForArgument(argument).offset());
    }

    static Address calleeFrameTagSlot(int slot)
    {
        return calleeFrameSlot(slot).withOffset(TagOffset);
    }

    static Address calleeFramePayloadSlot(int slot)
    {
        return calleeFrameSlot(slot).withOffset(PayloadOffset);
    }

    static Address calleeArgumentTagSlot(int argument)
    {
        return calleeArgumentSlot(argument).withOffset(TagOffset);
    }

    static Address calleeArgumentPayloadSlot(int argument)
    {
        return calleeArgumentSlot(argument).withOffset(PayloadOffset);
    }

    static Address calleeFrameCallerFrame()
    {
        return calleeFrameSlot(0).withOffset(CallFrame::callerFrameOffset());
    }

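    // Returns the first of regT0..regT5 that is not named by any of the
    // preserve arguments, for use as a scratch register.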
    static GPRReg selectScratchGPR(GPRReg preserve1 = InvalidGPRReg, GPRReg preserve2 = InvalidGPRReg, GPRReg preserve3 = InvalidGPRReg, GPRReg preserve4 = InvalidGPRReg, GPRReg preserve5 = InvalidGPRReg)
    {
        if (preserve1 != GPRInfo::regT0 && preserve2 != GPRInfo::regT0 && preserve3 != GPRInfo::regT0 && preserve4 != GPRInfo::regT0 && preserve5 != GPRInfo::regT0)
            return GPRInfo::regT0;

        if (preserve1 != GPRInfo::regT1 && preserve2 != GPRInfo::regT1 && preserve3 != GPRInfo::regT1 && preserve4 != GPRInfo::regT1 && preserve5 != GPRInfo::regT1)
            return GPRInfo::regT1;

        if (preserve1 != GPRInfo::regT2 && preserve2 != GPRInfo::regT2 && preserve3 != GPRInfo::regT2 && preserve4 != GPRInfo::regT2 && preserve5 != GPRInfo::regT2)
            return GPRInfo::regT2;

        if (preserve1 != GPRInfo::regT3 && preserve2 != GPRInfo::regT3 && preserve3 != GPRInfo::regT3 && preserve4 != GPRInfo::regT3 && preserve5 != GPRInfo::regT3)
            return GPRInfo::regT3;

        if (preserve1 != GPRInfo::regT4 && preserve2 != GPRInfo::regT4 && preserve3 != GPRInfo::regT4 && preserve4 != GPRInfo::regT4 && preserve5 != GPRInfo::regT4)
            return GPRInfo::regT4;

        return GPRInfo::regT5;
    }

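    // debugCall() spills every GPR and FPR to a VM-owned scratch buffer around
    // the call and publishes the buffer's active length, so the GC can scan the
    // saved registers while the call is in flight, and JIT register state is
    // fully restored afterwards.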
    // Add a debug call. This call has no effect on JIT code execution state.
    void debugCall(V_DebugOperation_EPP function, void* argument)
    {
        size_t scratchSize = sizeof(EncodedJSValue) * (GPRInfo::numberOfRegisters + FPRInfo::numberOfRegisters);
        ScratchBuffer* scratchBuffer = m_vm->scratchBufferForSize(scratchSize);
        EncodedJSValue* buffer = static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer());

        for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
            store64(GPRInfo::toRegister(i), buffer + i);
#else
            store32(GPRInfo::toRegister(i), buffer + i);
#endif
        }

        for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
            move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
            storeDouble(FPRInfo::toRegister(i), GPRInfo::regT0);
        }

        // Tell GC mark phase how much of the scratch buffer is active during call.
        move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT0);
        storePtr(TrustedImmPtr(scratchSize), GPRInfo::regT0);

#if CPU(X86_64) || CPU(ARM) || CPU(ARM64) || CPU(MIPS) || CPU(SH4)
        move(TrustedImmPtr(buffer), GPRInfo::argumentGPR2);
        move(TrustedImmPtr(argument), GPRInfo::argumentGPR1);
        move(GPRInfo::callFrameRegister, GPRInfo::argumentGPR0);
        GPRReg scratch = selectScratchGPR(GPRInfo::argumentGPR0, GPRInfo::argumentGPR1, GPRInfo::argumentGPR2);
#elif CPU(X86)
        poke(GPRInfo::callFrameRegister, 0);
        poke(TrustedImmPtr(argument), 1);
        poke(TrustedImmPtr(buffer), 2);
        GPRReg scratch = GPRInfo::regT0;
#else
#error "JIT not supported on this platform."
#endif
        move(TrustedImmPtr(reinterpret_cast<void*>(function)), scratch);
        call(scratch);

        move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), GPRInfo::regT0);
        storePtr(TrustedImmPtr(0), GPRInfo::regT0);

        for (unsigned i = 0; i < FPRInfo::numberOfRegisters; ++i) {
            move(TrustedImmPtr(buffer + GPRInfo::numberOfRegisters + i), GPRInfo::regT0);
            loadDouble(GPRInfo::regT0, FPRInfo::toRegister(i));
        }
        for (unsigned i = 0; i < GPRInfo::numberOfRegisters; ++i) {
#if USE(JSVALUE64)
            load64(buffer + i, GPRInfo::toRegister(i));
#else
            load32(buffer + i, GPRInfo::toRegister(i));
#endif
        }
    }

    // These methods JIT-generate dynamic, debug-only checks, akin to ASSERTs.
#if !ASSERT_DISABLED
    void jitAssertIsInt32(GPRReg);
    void jitAssertIsJSInt32(GPRReg);
    void jitAssertIsJSNumber(GPRReg);
    void jitAssertIsJSDouble(GPRReg);
    void jitAssertIsCell(GPRReg);
    void jitAssertHasValidCallFrame();
    void jitAssertIsNull(GPRReg);
    void jitAssertTagsInPlace();
    void jitAssertArgumentCountSane();
#else
    void jitAssertIsInt32(GPRReg) { }
    void jitAssertIsJSInt32(GPRReg) { }
    void jitAssertIsJSNumber(GPRReg) { }
    void jitAssertIsJSDouble(GPRReg) { }
    void jitAssertIsCell(GPRReg) { }
    void jitAssertHasValidCallFrame() { }
    void jitAssertIsNull(GPRReg) { }
    void jitAssertTagsInPlace() { }
    void jitAssertArgumentCountSane() { }
#endif

    void purifyNaN(FPRReg);

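    // Under the 64-bit encoding, a double is boxed by adding 2^48 to its raw
    // bits (implemented as subtracting TagTypeNumber), which keeps boxed
    // doubles disjoint from pointers and boxed int32s; unboxing reverses the
    // offset.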
    // These methods convert between raw doubles and doubles boxed as JSValues.
#if USE(JSVALUE64)
    GPRReg boxDouble(FPRReg fpr, GPRReg gpr)
    {
        moveDoubleTo64(fpr, gpr);
        sub64(GPRInfo::tagTypeNumberRegister, gpr);
        jitAssertIsJSDouble(gpr);
        return gpr;
    }
    FPRReg unboxDoubleWithoutAssertions(GPRReg gpr, FPRReg fpr)
    {
        add64(GPRInfo::tagTypeNumberRegister, gpr);
        move64ToDouble(gpr, fpr);
        return fpr;
    }
    FPRReg unboxDouble(GPRReg gpr, FPRReg fpr)
    {
        jitAssertIsJSDouble(gpr);
        return unboxDoubleWithoutAssertions(gpr, fpr);
    }

    void boxDouble(FPRReg fpr, JSValueRegs regs)
    {
        boxDouble(fpr, regs.gpr());
    }

    // Here are possible arrangements of source, target, scratch:
    // - source, target, scratch can all be separate registers.
    // - source and target can be the same but scratch is separate.
    // - target and scratch can be the same but source is separate.
    void boxInt52(GPRReg source, GPRReg target, GPRReg scratch, FPRReg fpScratch)
    {
        // Is it an int32?
        signExtend32ToPtr(source, scratch);
        Jump isInt32 = branch64(Equal, source, scratch);

        // Nope, it's not, but the source register contains the int64 value.
        convertInt64ToDouble(source, fpScratch);
        boxDouble(fpScratch, target);
        Jump done = jump();

        isInt32.link(this);
        zeroExtend32ToPtr(source, target);
        or64(GPRInfo::tagTypeNumberRegister, target);

        done.link(this);
    }
#endif

#if USE(JSVALUE32_64)
    void boxDouble(FPRReg fpr, GPRReg tagGPR, GPRReg payloadGPR)
    {
        moveDoubleToInts(fpr, payloadGPR, tagGPR);
    }
    void unboxDouble(GPRReg tagGPR, GPRReg payloadGPR, FPRReg fpr, FPRReg scratchFPR)
    {
        moveIntsToDouble(payloadGPR, tagGPR, fpr, scratchFPR);
    }

    void boxDouble(FPRReg fpr, JSValueRegs regs)
    {
        boxDouble(fpr, regs.tagGPR(), regs.payloadGPR());
    }
#endif

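    // Booleans box by adding ValueFalse on 64-bit (so false becomes ValueFalse
    // and true becomes ValueTrue); on 32-bit the payload is the raw bool and
    // the tag is BooleanTag.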
    void boxBooleanPayload(GPRReg boolGPR, GPRReg payloadGPR)
    {
#if USE(JSVALUE64)
        add32(TrustedImm32(ValueFalse), boolGPR, payloadGPR);
#else
        move(boolGPR, payloadGPR);
#endif
    }

    void boxBoolean(GPRReg boolGPR, JSValueRegs boxedRegs)
    {
        boxBooleanPayload(boolGPR, boxedRegs.payloadGPR());
#if USE(JSVALUE32_64)
        move(TrustedImm32(JSValue::BooleanTag), boxedRegs.tagGPR());
#endif
    }

    void callExceptionFuzz();

    enum ExceptionCheckKind { NormalExceptionCheck, InvertedExceptionCheck };
    enum ExceptionJumpWidth { NormalJumpWidth, FarJumpWidth };
    Jump emitExceptionCheck(
        ExceptionCheckKind = NormalExceptionCheck, ExceptionJumpWidth = NormalJumpWidth);

#if ENABLE(SAMPLING_COUNTERS)
    static void emitCount(MacroAssembler& jit, AbstractSamplingCounter& counter, int32_t increment = 1)
    {
        jit.add64(TrustedImm32(increment), AbsoluteAddress(counter.addressOfCounter()));
    }
    void emitCount(AbstractSamplingCounter& counter, int32_t increment = 1)
    {
        add64(TrustedImm32(increment), AbsoluteAddress(counter.addressOfCounter()));
    }
#endif

#if ENABLE(SAMPLING_FLAGS)
    void setSamplingFlag(int32_t);
    void clearSamplingFlag(int32_t flag);
#endif

    JSGlobalObject* globalObjectFor(CodeOrigin codeOrigin)
    {
        return codeBlock()->globalObjectFor(codeOrigin);
    }

    bool isStrictModeFor(CodeOrigin codeOrigin)
    {
        if (!codeOrigin.inlineCallFrame)
            return codeBlock()->isStrictMode();
        return jsCast<FunctionExecutable*>(codeOrigin.inlineCallFrame->executable.get())->isStrictMode();
    }

    ECMAMode ecmaModeFor(CodeOrigin codeOrigin)
    {
        return isStrictModeFor(codeOrigin) ? StrictMode : NotStrictMode;
    }

    ExecutableBase* executableFor(const CodeOrigin& codeOrigin);

    CodeBlock* baselineCodeBlockFor(const CodeOrigin& codeOrigin)
    {
        return baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin, baselineCodeBlock());
    }

    CodeBlock* baselineCodeBlockFor(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return baselineCodeBlock();
        return baselineCodeBlockForInlineCallFrame(inlineCallFrame);
    }

    CodeBlock* baselineCodeBlock()
    {
        return m_baselineCodeBlock;
    }

    SymbolTable* symbolTableFor(const CodeOrigin& codeOrigin)
    {
        return baselineCodeBlockFor(codeOrigin)->symbolTable();
    }

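    // Returns the virtual register at which the arguments of the given frame
    // (inlined or not) start in the machine frame.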
    static VirtualRegister argumentsStart(InlineCallFrame* inlineCallFrame)
    {
        if (!inlineCallFrame)
            return VirtualRegister(CallFrame::argumentOffset(0));
        if (inlineCallFrame->arguments.size() <= 1)
            return virtualRegisterForLocal(0);
        ValueRecovery recovery = inlineCallFrame->arguments[1];
        RELEASE_ASSERT(recovery.technique() == DisplacedInJSStack);
        return recovery.virtualRegister();
    }

    static VirtualRegister argumentsStart(const CodeOrigin& codeOrigin)
    {
        return argumentsStart(codeOrigin.inlineCallFrame);
    }

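    // On 64-bit, cells hold a 32-bit StructureID that indexes the VM's
    // structureIDTable; on 32-bit the Structure pointer is stored in the cell
    // directly.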
    void emitLoadStructure(RegisterID source, RegisterID dest, RegisterID scratch)
    {
#if USE(JSVALUE64)
        load32(MacroAssembler::Address(source, JSCell::structureIDOffset()), dest);
        loadPtr(vm()->heap.structureIDTable().base(), scratch);
        loadPtr(MacroAssembler::BaseIndex(scratch, dest, MacroAssembler::TimesEight), dest);
#else
        UNUSED_PARAM(scratch);
        loadPtr(MacroAssembler::Address(source, JSCell::structureIDOffset()), dest);
#endif
    }

    static void emitLoadStructure(AssemblyHelpers& jit, RegisterID base, RegisterID dest, RegisterID scratch)
    {
#if USE(JSVALUE64)
        jit.load32(MacroAssembler::Address(base, JSCell::structureIDOffset()), dest);
        jit.loadPtr(jit.vm()->heap.structureIDTable().base(), scratch);
        jit.loadPtr(MacroAssembler::BaseIndex(scratch, dest, MacroAssembler::TimesEight), dest);
#else
        UNUSED_PARAM(scratch);
        jit.loadPtr(MacroAssembler::Address(base, JSCell::structureIDOffset()), dest);
#endif
    }

    void emitStoreStructureWithTypeInfo(TrustedImmPtr structure, RegisterID dest, RegisterID)
    {
        emitStoreStructureWithTypeInfo(*this, structure, dest);
    }

    void emitStoreStructureWithTypeInfo(RegisterID structure, RegisterID dest, RegisterID scratch)
    {
#if USE(JSVALUE64)
        load64(MacroAssembler::Address(structure, Structure::structureIDOffset()), scratch);
        store64(scratch, MacroAssembler::Address(dest, JSCell::structureIDOffset()));
#else
        // Store all the info flags using a single 32-bit wide load and store.
        load32(MacroAssembler::Address(structure, Structure::indexingTypeOffset()), scratch);
        store32(scratch, MacroAssembler::Address(dest, JSCell::indexingTypeOffset()));

        // Store the StructureID
        storePtr(structure, MacroAssembler::Address(dest, JSCell::structureIDOffset()));
#endif
    }

    static void emitStoreStructureWithTypeInfo(AssemblyHelpers& jit, TrustedImmPtr structure, RegisterID dest);

    Jump jumpIfIsRememberedOrInEden(GPRReg cell)
    {
        return branchTest8(MacroAssembler::NonZero, MacroAssembler::Address(cell, JSCell::gcDataOffset()));
    }

    Jump jumpIfIsRememberedOrInEden(JSCell* cell)
    {
        uint8_t* address = reinterpret_cast<uint8_t*>(cell) + JSCell::gcDataOffset();
        return branchTest8(MacroAssembler::NonZero, MacroAssembler::AbsoluteAddress(address));
    }

    // Emits the branch structure for typeof. The code emitted by this doesn't fall through. The
    // functor is called at those points where we have pinpointed a type. One way to use this is to
    // have the functor emit the code to put the type string into an appropriate register and then
    // jump out. A secondary functor is used for the call trap and masquerades-as-undefined slow
    // case. It is passed the unlinked jump to the slow case.
    template<typename Functor, typename SlowPathFunctor>
    void emitTypeOf(
        JSValueRegs regs, GPRReg tempGPR, const Functor& functor,
        const SlowPathFunctor& slowPathFunctor)
    {
        // Implements the following branching structure:
        //
        // if (is cell) {
        //     if (is object) {
        //         if (is function) {
        //             return function;
        //         } else if (doesn't have call trap and doesn't masquerade as undefined) {
        //             return object
        //         } else {
        //             return slowPath();
        //         }
        //     } else if (is string) {
        //         return string
        //     } else {
        //         return symbol
        //     }
        // } else if (is number) {
        //     return number
        // } else if (is null) {
        //     return object
        // } else if (is boolean) {
        //     return boolean
        // } else {
        //     return undefined
        // }

        Jump notCell = branchIfNotCell(regs);

        GPRReg cellGPR = regs.payloadGPR();
        Jump notObject = branchIfNotObject(cellGPR);

        Jump notFunction = branchIfNotFunction(cellGPR);
        functor(TypeofType::Function, false);

        notFunction.link(this);
        slowPathFunctor(
            branchTest8(
                NonZero,
                Address(cellGPR, JSCell::typeInfoFlagsOffset()),
                TrustedImm32(MasqueradesAsUndefined | TypeOfShouldCallGetCallData)));
        functor(TypeofType::Object, false);

        notObject.link(this);

        Jump notString = branchIfNotString(cellGPR);
        functor(TypeofType::String, false);
        notString.link(this);
        functor(TypeofType::Symbol, false);

        notCell.link(this);

        Jump notNumber = branchIfNotNumber(regs, tempGPR);
        functor(TypeofType::Number, false);
        notNumber.link(this);

        JumpList notNull = branchIfNotEqual(regs, jsNull());
        functor(TypeofType::Object, false);
        notNull.link(this);

        Jump notBoolean = branchIfNotBoolean(regs, tempGPR);
        functor(TypeofType::Boolean, false);
        notBoolean.link(this);

        functor(TypeofType::Undefined, true);
    }

    Vector<BytecodeAndMachineOffset>& decodedCodeMapFor(CodeBlock*);

protected:
    VM* m_vm;
    CodeBlock* m_codeBlock;
    CodeBlock* m_baselineCodeBlock;

    HashMap<CodeBlock*, Vector<BytecodeAndMachineOffset>> m_decodedCodeMaps;
};

} // namespace JSC

#endif // ENABLE(JIT)

#endif // AssemblyHelpers_h