/*
 * Copyright (C) 2008, 2012, 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
26 #ifndef MacroAssemblerX86_64_h
27 #define MacroAssemblerX86_64_h
29 #if ENABLE(ASSEMBLER) && CPU(X86_64)
31 #include "MacroAssemblerX86Common.h"
33 #define REPTACH_OFFSET_CALL_R11 3
// Returns true when `value` fits in a 32-bit immediate: truncating to
// 32 bits and sign-extending back to 64 bits reproduces the original value.
// Used to decide whether a 64-bit store can use the shorter imm32 encoding.
inline bool CAN_SIGN_EXTEND_32_64(int64_t value)
{
    const int64_t roundTripped = static_cast<int64_t>(static_cast<int32_t>(value));
    return roundTripped == value;
}
// 64-bit x86 flavour of the JIT macro assembler. Extends the common x86
// implementation (MacroAssemblerX86Common) with quadword (64-bit) ALU,
// load/store, compare and branch operations, and with absolute-address forms
// that route through the scratch register (r11), since x86-64 cannot encode
// most 64-bit absolute addresses directly in an instruction.
//
// NOTE(review): this block was reconstructed from a mangled extraction.
// Tokens the extraction dropped (access labels, `else` branches, closing
// braces, `#if OS(WINDOWS)` guards, `padBeforePatch()` calls, trailing
// `return` statements, the class's closing `};`) were restored to match the
// surrounding code's obvious intent — verify against the canonical file.
class MacroAssemblerX86_64 : public MacroAssemblerX86Common {
public:
    // Pointers are 8 bytes wide on x86-64.
    static const Scale ScalePtr = TimesEight;

    using MacroAssemblerX86Common::add32;
    using MacroAssemblerX86Common::and32;
    using MacroAssemblerX86Common::branchAdd32;
    using MacroAssemblerX86Common::or32;
    using MacroAssemblerX86Common::sub32;
    using MacroAssemblerX86Common::load8;
    using MacroAssemblerX86Common::load32;
    using MacroAssemblerX86Common::store32;
    using MacroAssemblerX86Common::store8;
    using MacroAssemblerX86Common::call;
    using MacroAssemblerX86Common::jump;
    using MacroAssemblerX86Common::addDouble;
    using MacroAssemblerX86Common::loadDouble;
    using MacroAssemblerX86Common::convertInt32ToDouble;

    // 32-bit read-modify-write on an absolute address: materialize the
    // address in the scratch register, then reuse the Address-based overload.
    void add32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        add32(imm, Address(scratchRegister));
    }

    void and32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        and32(imm, Address(scratchRegister));
    }

    void add32(AbsoluteAddress address, RegisterID dest)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        add32(Address(scratchRegister), dest);
    }

    void or32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        or32(imm, Address(scratchRegister));
    }

    void or32(RegisterID reg, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        or32(reg, Address(scratchRegister));
    }

    void sub32(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        sub32(imm, Address(scratchRegister));
    }

    // Loads from an absolute address use `dest` itself as the address
    // register, leaving the scratch register free for the caller.
    void load8(const void* address, RegisterID dest)
    {
        move(TrustedImmPtr(address), dest);
        load8(dest, dest);
    }

    void load32(const void* address, RegisterID dest)
    {
        if (dest == X86Registers::eax)
            m_assembler.movl_mEAX(address); // Shorter dedicated encoding for eax.
        else {
            move(TrustedImmPtr(address), dest);
            load32(dest, dest);
        }
    }

    void addDouble(AbsoluteAddress address, FPRegisterID dest)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        m_assembler.addsd_mr(0, scratchRegister, dest);
    }

    void convertInt32ToDouble(TrustedImm32 imm, FPRegisterID dest)
    {
        move(imm, scratchRegister);
        m_assembler.cvtsi2sd_rr(scratchRegister, dest);
    }

    void store32(TrustedImm32 imm, void* address)
    {
        move(TrustedImmPtr(address), scratchRegister);
        store32(imm, scratchRegister);
    }

    void store32(RegisterID source, void* address)
    {
        if (source == X86Registers::eax)
            m_assembler.movl_EAXm(address); // Shorter dedicated encoding for eax.
        else {
            move(TrustedImmPtr(address), scratchRegister);
            store32(source, scratchRegister);
        }
    }

    void store8(TrustedImm32 imm, void* address)
    {
        move(TrustedImmPtr(address), scratchRegister);
        store8(imm, Address(scratchRegister));
    }

    void store8(RegisterID reg, void* address)
    {
        move(TrustedImmPtr(address), scratchRegister);
        store8(reg, Address(scratchRegister));
    }

#if OS(WINDOWS)
    Call callWithSlowPathReturnType()
    {
        // On Win64, when the return type is larger than 8 bytes, we need to allocate space on the stack for the return value.
        // On entry, rcx should contain a pointer to this stack space. The other parameters are shifted to the right,
        // rdx should contain the first argument, r8 should contain the second argument, and r9 should contain the third argument.
        // On return, rax contains a pointer to this stack value. See http://msdn.microsoft.com/en-us/library/7572ztz4.aspx.
        // We then need to copy the 16 byte return value into rax and rdx, since JIT expects the return value to be split between the two.
        // It is assumed that the parameters are already shifted to the right, when entering this method.
        // Note: this implementation supports up to 3 parameters.

        // JIT relies on the CallerFrame (frame pointer) being put on the stack,
        // On Win64 we need to manually copy the frame pointer to the stack, since MSVC may not maintain a frame pointer on 64-bit.
        // See http://msdn.microsoft.com/en-us/library/9z1stfyw.aspx where it's stated that rbp MAY be used as a frame pointer.
        store64(X86Registers::ebp, Address(X86Registers::esp, -16));

        // We also need to allocate the shadow space on the stack for the 4 parameter registers.
        // In addition, we need to allocate 16 bytes for the return value.
        // Also, we should allocate 16 bytes for the frame pointer, and return address (not populated).
        sub64(TrustedImm32(8 * sizeof(int64_t)), X86Registers::esp);

        // The first parameter register should contain a pointer to the stack allocated space for the return value.
        move(X86Registers::esp, X86Registers::ecx);
        add64(TrustedImm32(4 * sizeof(int64_t)), X86Registers::ecx);

        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Call result = Call(m_assembler.call(scratchRegister), Call::Linkable);

        add64(TrustedImm32(8 * sizeof(int64_t)), X86Registers::esp);

        // Copy the return value into rax and rdx.
        load64(Address(X86Registers::eax, sizeof(int64_t)), X86Registers::edx);
        load64(Address(X86Registers::eax), X86Registers::eax);

        ASSERT_UNUSED(label, differenceBetween(label, result) == REPTACH_OFFSET_CALL_R11);
        return result;
    }
#endif

    Call call()
    {
#if OS(WINDOWS)
        // JIT relies on the CallerFrame (frame pointer) being put on the stack,
        // On Win64 we need to manually copy the frame pointer to the stack, since MSVC may not maintain a frame pointer on 64-bit.
        // See http://msdn.microsoft.com/en-us/library/9z1stfyw.aspx where it's stated that rbp MAY be used as a frame pointer.
        store64(X86Registers::ebp, Address(X86Registers::esp, -16));

        // On Windows we need to copy the arguments that don't fit in registers to the stack location where the callee expects to find them.
        // We don't know the number of arguments at this point, so the arguments (5, 6, ...) should always be copied.

        // Copy argument 5.
        load64(Address(X86Registers::esp, 4 * sizeof(int64_t)), scratchRegister);
        store64(scratchRegister, Address(X86Registers::esp, -4 * static_cast<int32_t>(sizeof(int64_t))));

        // Copy argument 6.
        load64(Address(X86Registers::esp, 5 * sizeof(int64_t)), scratchRegister);
        store64(scratchRegister, Address(X86Registers::esp, -3 * static_cast<int32_t>(sizeof(int64_t))));

        // We also need to allocate the shadow space on the stack for the 4 parameter registers.
        // Also, we should allocate 16 bytes for the frame pointer, and return address (not populated).
        // In addition, we need to allocate 16 bytes for two more parameters, since the call can have up to 6 parameters.
        sub64(TrustedImm32(8 * sizeof(int64_t)), X86Registers::esp);
#endif
        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Call result = Call(m_assembler.call(scratchRegister), Call::Linkable);
#if OS(WINDOWS)
        add64(TrustedImm32(8 * sizeof(int64_t)), X86Registers::esp);
#endif
        // The patchable pointer load must sit at a fixed distance before the
        // call so linkCall()/repatchCall() can find it.
        ASSERT_UNUSED(label, differenceBetween(label, result) == REPTACH_OFFSET_CALL_R11);
        return result;
    }

    // Address is a memory location containing the address to jump to
    void jump(AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        jump(Address(scratchRegister));
    }

    // Emits a patchable pointer load followed by an indirect jump; linked
    // later like a call (tail call).
    Call tailRecursiveCall()
    {
        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Jump newJump = Jump(m_assembler.jmp_r(scratchRegister));
        ASSERT_UNUSED(label, differenceBetween(label, newJump) == REPTACH_OFFSET_CALL_R11);
        return Call::fromTailJump(newJump);
    }

    Call makeTailRecursiveCall(Jump oldJump)
    {
        oldJump.link(this);
        DataLabelPtr label = moveWithPatch(TrustedImmPtr(0), scratchRegister);
        Jump newJump = Jump(m_assembler.jmp_r(scratchRegister));
        ASSERT_UNUSED(label, differenceBetween(label, newJump) == REPTACH_OFFSET_CALL_R11);
        return Call::fromTailJump(newJump);
    }

    Jump branchAdd32(ResultCondition cond, TrustedImm32 src, AbsoluteAddress dest)
    {
        move(TrustedImmPtr(dest.m_ptr), scratchRegister);
        add32(src, Address(scratchRegister));
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    // --- 64-bit ALU operations ---

    void add64(RegisterID src, RegisterID dest)
    {
        m_assembler.addq_rr(src, dest);
    }

    void add64(Address src, RegisterID dest)
    {
        m_assembler.addq_mr(src.offset, src.base, dest);
    }

    void add64(AbsoluteAddress src, RegisterID dest)
    {
        move(TrustedImmPtr(src.m_ptr), scratchRegister);
        add64(Address(scratchRegister), dest);
    }

    void add64(TrustedImm32 imm, RegisterID srcDest)
    {
        if (imm.m_value == 1)
            m_assembler.incq_r(srcDest); // inc is shorter than add $1.
        else
            m_assembler.addq_ir(imm.m_value, srcDest);
    }

    void add64(TrustedImm64 imm, RegisterID dest)
    {
        if (imm.m_value == 1)
            m_assembler.incq_r(dest);
        else {
            // A full 64-bit immediate cannot be an add operand; stage it in
            // the scratch register first.
            move(imm, scratchRegister);
            add64(scratchRegister, dest);
        }
    }

    void add64(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        // lea computes src + imm without touching the flags.
        m_assembler.leaq_mr(imm.m_value, src, dest);
    }

    void add64(TrustedImm32 imm, Address address)
    {
        if (imm.m_value == 1)
            m_assembler.incq_m(address.offset, address.base);
        else
            m_assembler.addq_im(imm.m_value, address.offset, address.base);
    }

    void add64(TrustedImm32 imm, AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), scratchRegister);
        add64(imm, Address(scratchRegister));
    }

    void addPtrNoFlags(TrustedImm32 imm, RegisterID srcDest)
    {
        // lea adds without modifying the condition flags.
        m_assembler.leaq_mr(imm.m_value, srcDest, srcDest);
    }

    void and64(RegisterID src, RegisterID dest)
    {
        m_assembler.andq_rr(src, dest);
    }

    void and64(TrustedImm32 imm, RegisterID srcDest)
    {
        m_assembler.andq_ir(imm.m_value, srcDest);
    }

    void and64(TrustedImmPtr imm, RegisterID srcDest)
    {
        move(imm, scratchRegister);
        and64(scratchRegister, srcDest);
    }

    void lshift64(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.shlq_i8r(imm.m_value, dest);
    }

    void rshift64(TrustedImm32 imm, RegisterID dest)
    {
        // Arithmetic (sign-preserving) right shift.
        m_assembler.sarq_i8r(imm.m_value, dest);
    }

    void urshift64(TrustedImm32 imm, RegisterID dest)
    {
        // Logical (zero-filling) right shift.
        m_assembler.shrq_i8r(imm.m_value, dest);
    }

    void mul64(RegisterID src, RegisterID dest)
    {
        m_assembler.imulq_rr(src, dest);
    }

    void neg64(RegisterID dest)
    {
        m_assembler.negq_r(dest);
    }

    void or64(RegisterID src, RegisterID dest)
    {
        m_assembler.orq_rr(src, dest);
    }

    void or64(TrustedImm64 imm, RegisterID dest)
    {
        move(imm, scratchRegister);
        or64(scratchRegister, dest);
    }

    void or64(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.orq_ir(imm.m_value, dest);
    }

    void or64(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        if (op1 == op2)
            move(op1, dest);
        else if (op1 == dest)
            or64(op2, dest);
        else {
            move(op2, dest);
            or64(op1, dest);
        }
    }

    void or64(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        move(src, dest);
        or64(imm, dest);
    }

    void rotateRight64(TrustedImm32 imm, RegisterID srcDst)
    {
        m_assembler.rorq_i8r(imm.m_value, srcDst);
    }

    void sub64(RegisterID src, RegisterID dest)
    {
        m_assembler.subq_rr(src, dest);
    }

    void sub64(TrustedImm32 imm, RegisterID dest)
    {
        if (imm.m_value == 1)
            m_assembler.decq_r(dest); // dec is shorter than sub $1.
        else
            m_assembler.subq_ir(imm.m_value, dest);
    }

    void sub64(TrustedImm64 imm, RegisterID dest)
    {
        if (imm.m_value == 1)
            m_assembler.decq_r(dest);
        else {
            move(imm, scratchRegister);
            sub64(scratchRegister, dest);
        }
    }

    void xor64(RegisterID src, RegisterID dest)
    {
        m_assembler.xorq_rr(src, dest);
    }

    void xor64(RegisterID src, Address dest)
    {
        m_assembler.xorq_rm(src, dest.offset, dest.base);
    }

    void xor64(TrustedImm32 imm, RegisterID srcDest)
    {
        m_assembler.xorq_ir(imm.m_value, srcDest);
    }

    // --- 64-bit loads and stores ---

    void load64(ImplicitAddress address, RegisterID dest)
    {
        m_assembler.movq_mr(address.offset, address.base, dest);
    }

    void load64(BaseIndex address, RegisterID dest)
    {
        m_assembler.movq_mr(address.offset, address.base, address.index, address.scale, dest);
    }

    void load64(const void* address, RegisterID dest)
    {
        if (dest == X86Registers::eax)
            m_assembler.movq_mEAX(address); // Shorter dedicated encoding for rax.
        else {
            move(TrustedImmPtr(address), dest);
            load64(dest, dest);
        }
    }

    // Emits a load whose 32-bit displacement can be repatched later; the
    // returned label marks the displacement's location.
    DataLabel32 load64WithAddressOffsetPatch(Address address, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_mr_disp32(address.offset, address.base, dest);
        return DataLabel32(this);
    }

    // Same, but with a compact (8-bit) patchable displacement.
    DataLabelCompact load64WithCompactAddressOffsetPatch(Address address, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_mr_disp8(address.offset, address.base, dest);
        return DataLabelCompact(this);
    }

    void store64(RegisterID src, ImplicitAddress address)
    {
        m_assembler.movq_rm(src, address.offset, address.base);
    }

    void store64(RegisterID src, BaseIndex address)
    {
        m_assembler.movq_rm(src, address.offset, address.base, address.index, address.scale);
    }

    void store64(RegisterID src, void* address)
    {
        if (src == X86Registers::eax)
            m_assembler.movq_EAXm(address); // Shorter dedicated encoding for rax.
        else {
            move(TrustedImmPtr(address), scratchRegister);
            store64(src, scratchRegister);
        }
    }

    void store64(TrustedImm64 imm, ImplicitAddress address)
    {
        if (CAN_SIGN_EXTEND_32_64(imm.m_value))
            m_assembler.movq_i32m(static_cast<int>(imm.m_value), address.offset, address.base);
        else {
            // Immediate doesn't fit in a sign-extended imm32; stage it in the
            // scratch register.
            move(imm, scratchRegister);
            store64(scratchRegister, address);
        }
    }

    void store64(TrustedImm64 imm, BaseIndex address)
    {
        move(imm, scratchRegister);
        m_assembler.movq_rm(scratchRegister, address.offset, address.base, address.index, address.scale);
    }

    DataLabel32 store64WithAddressOffsetPatch(RegisterID src, Address address)
    {
        padBeforePatch();
        m_assembler.movq_rm_disp32(src, address.offset, address.base);
        return DataLabel32(this);
    }

    // Bitwise moves between GPR and XMM (no conversion).
    void move64ToDouble(RegisterID src, FPRegisterID dest)
    {
        m_assembler.movq_rr(src, dest);
    }

    void moveDoubleTo64(FPRegisterID src, RegisterID dest)
    {
        m_assembler.movq_rr(src, dest);
    }

    // --- 64-bit compares, branches and tests ---

    void compare64(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
    {
        if (((cond == Equal) || (cond == NotEqual)) && !right.m_value)
            m_assembler.testq_rr(left, left); // test reg,reg is shorter than cmp $0.
        else
            m_assembler.cmpq_ir(right.m_value, left);
        m_assembler.setCC_r(x86Condition(cond), dest);
        m_assembler.movzbl_rr(dest, dest); // setCC writes 8 bits; zero-extend.
    }

    void compare64(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
    {
        m_assembler.cmpq_rr(right, left);
        m_assembler.setCC_r(x86Condition(cond), dest);
        m_assembler.movzbl_rr(dest, dest);
    }

    Jump branch64(RelationalCondition cond, RegisterID left, RegisterID right)
    {
        m_assembler.cmpq_rr(right, left);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch64(RelationalCondition cond, RegisterID left, TrustedImm64 right)
    {
        if (((cond == Equal) || (cond == NotEqual)) && !right.m_value) {
            m_assembler.testq_rr(left, left);
            return Jump(m_assembler.jCC(x86Condition(cond)));
        }
        move(right, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branch64(RelationalCondition cond, RegisterID left, Address right)
    {
        m_assembler.cmpq_mr(right.offset, right.base, left);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch64(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
    {
        move(TrustedImmPtr(left.m_ptr), scratchRegister);
        return branch64(cond, Address(scratchRegister), right);
    }

    Jump branch64(RelationalCondition cond, Address left, RegisterID right)
    {
        m_assembler.cmpq_rm(right, left.offset, left.base);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch64(RelationalCondition cond, Address left, TrustedImm64 right)
    {
        move(right, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branch64(RelationalCondition cond, BaseIndex address, RegisterID right)
    {
        m_assembler.cmpq_rm(right, address.offset, address.base, address.index, address.scale);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchPtr(RelationalCondition cond, BaseIndex left, RegisterID right)
    {
        return branch64(cond, left, right);
    }

    Jump branchPtr(RelationalCondition cond, BaseIndex left, TrustedImmPtr right)
    {
        move(right, scratchRegister);
        return branchPtr(cond, left, scratchRegister);
    }

    Jump branchTest64(ResultCondition cond, RegisterID reg, RegisterID mask)
    {
        m_assembler.testq_rr(reg, mask);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchTest64(ResultCondition cond, RegisterID reg, TrustedImm32 mask = TrustedImm32(-1))
    {
        // if we are only interested in the low seven bits, this can be tested with a testb
        if (mask.m_value == -1)
            m_assembler.testq_rr(reg, reg);
        else if ((mask.m_value & ~0x7f) == 0)
            m_assembler.testb_i8r(mask.m_value, reg);
        else
            m_assembler.testq_i32r(mask.m_value, reg);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    void test64(ResultCondition cond, RegisterID reg, TrustedImm32 mask, RegisterID dest)
    {
        if (mask.m_value == -1)
            m_assembler.testq_rr(reg, reg);
        else if ((mask.m_value & ~0x7f) == 0)
            m_assembler.testb_i8r(mask.m_value, reg);
        else
            m_assembler.testq_i32r(mask.m_value, reg);
        set32(x86Condition(cond), dest);
    }

    void test64(ResultCondition cond, RegisterID reg, RegisterID mask, RegisterID dest)
    {
        m_assembler.testq_rr(reg, mask);
        set32(x86Condition(cond), dest);
    }

    Jump branchTest64(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        load64(address.m_ptr, scratchRegister);
        return branchTest64(cond, scratchRegister, mask);
    }

    Jump branchTest64(ResultCondition cond, Address address, TrustedImm32 mask = TrustedImm32(-1))
    {
        if (mask.m_value == -1)
            m_assembler.cmpq_im(0, address.offset, address.base);
        else
            m_assembler.testq_i32m(mask.m_value, address.offset, address.base);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchTest64(ResultCondition cond, Address address, RegisterID reg)
    {
        m_assembler.testq_rm(reg, address.offset, address.base);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchTest64(ResultCondition cond, BaseIndex address, TrustedImm32 mask = TrustedImm32(-1))
    {
        if (mask.m_value == -1)
            m_assembler.cmpq_im(0, address.offset, address.base, address.index, address.scale);
        else
            m_assembler.testq_i32m(mask.m_value, address.offset, address.base, address.index, address.scale);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    // Arithmetic ops that branch on the resulting flags (e.g. Overflow, Zero).

    Jump branchAdd64(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        add64(imm, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchAdd64(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        add64(src, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchMul64(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        mul64(src, dest);
        if (cond != Overflow)
            m_assembler.testq_rr(dest, dest); // imul only sets OF/CF meaningfully.
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub64(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        sub64(imm, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub64(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        sub64(src, dest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub64(ResultCondition cond, RegisterID src1, TrustedImm32 src2, RegisterID dest)
    {
        move(src1, dest);
        return branchSub64(cond, src2, dest);
    }

    Jump branchNeg64(ResultCondition cond, RegisterID srcDest)
    {
        neg64(srcDest);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    // Stashes the reason (and optional detail) in volatile registers, then
    // traps, so a crash dump shows why the JIT aborted.
    void abortWithReason(AbortReason reason)
    {
        move(TrustedImm32(reason), X86Registers::r11);
        breakpoint();
    }

    void abortWithReason(AbortReason reason, intptr_t misc)
    {
        move(TrustedImm64(misc), X86Registers::r10);
        abortWithReason(reason);
    }

    // Emits a load that can later be converted (e.g. to an add) in place.
    ConvertibleLoadLabel convertibleLoadPtr(Address address, RegisterID dest)
    {
        ConvertibleLoadLabel result = ConvertibleLoadLabel(this);
        m_assembler.movq_mr(address.offset, address.base, dest);
        return result;
    }

    // Emits a movabs whose 64-bit immediate can be repatched later.
    DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_i64r(initialValue.asIntptr(), dest);
        return DataLabelPtr(this);
    }

    DataLabelPtr moveWithPatch(TrustedImm32 initialValue, RegisterID dest)
    {
        padBeforePatch();
        m_assembler.movq_i64r(initialValue.m_value, dest);
        return DataLabelPtr(this);
    }

    Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        dataLabel = moveWithPatch(initialRightValue, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        dataLabel = moveWithPatch(initialRightValue, scratchRegister);
        return branch64(cond, left, scratchRegister);
    }

    Jump branch32WithPatch(RelationalCondition cond, Address left, DataLabel32& dataLabel, TrustedImm32 initialRightValue = TrustedImm32(0))
    {
        padBeforePatch();
        m_assembler.movl_i32r(initialRightValue.m_value, scratchRegister);
        dataLabel = DataLabel32(this);
        return branch32(cond, left, scratchRegister);
    }

    DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
    {
        DataLabelPtr label = moveWithPatch(initialValue, scratchRegister);
        store64(scratchRegister, address);
        return label;
    }

    using MacroAssemblerX86Common::branch8;
    Jump branch8(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
    {
        MacroAssemblerX86Common::move(TrustedImmPtr(left.m_ptr), scratchRegister);
        return MacroAssemblerX86Common::branch8(cond, Address(scratchRegister), right);
    }

    using MacroAssemblerX86Common::branchTest8;
    Jump branchTest8(ResultCondition cond, ExtendedAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        // The 64-bit offset becomes the base; the original base register is
        // added in as an index with scale 1.
        TrustedImmPtr addr(reinterpret_cast<void*>(address.offset));
        MacroAssemblerX86Common::move(addr, scratchRegister);
        return MacroAssemblerX86Common::branchTest8(cond, BaseIndex(scratchRegister, address.base, TimesOne), mask);
    }

    Jump branchTest8(ResultCondition cond, AbsoluteAddress address, TrustedImm32 mask = TrustedImm32(-1))
    {
        MacroAssemblerX86Common::move(TrustedImmPtr(address.m_ptr), scratchRegister);
        return MacroAssemblerX86Common::branchTest8(cond, Address(scratchRegister), mask);
    }

    void convertInt64ToDouble(RegisterID src, FPRegisterID dest)
    {
        m_assembler.cvtsi2sdq_rr(src, dest);
    }

    // Feature queries: always available on x86-64.
    static bool supportsFloatingPoint() { return true; }
    static bool supportsFloatingPointTruncate() { return true; }
    static bool supportsFloatingPointSqrt() { return true; }
    static bool supportsFloatingPointAbs() { return true; }

    static FunctionPtr readCallTarget(CodeLocationCall call)
    {
        // The call target lives in the movabs immediate emitted a fixed
        // distance before the indirect call.
        return FunctionPtr(X86Assembler::readPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation()));
    }

    static bool haveScratchRegisterForBlinding() { return true; }
    static RegisterID scratchRegisterForBlinding() { return scratchRegister; }

    static bool canJumpReplacePatchableBranchPtrWithPatch() { return true; }
    static bool canJumpReplacePatchableBranch32WithPatch() { return true; }

    // Start of the instruction sequence emitted by moveWithPatch(TrustedImmPtr):
    // REX prefix + opcode + 8-byte immediate, ending at `label`.
    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
    {
        const int rexBytes = 1;
        const int opcodeBytes = 1;
        const int immediateBytes = 8;
        const int totalBytes = rexBytes + opcodeBytes + immediateBytes;
        ASSERT(totalBytes >= maxJumpReplacementSize());
        return label.labelAtOffset(-totalBytes);
    }

    // Same for the 32-bit immediate move emitted by branch32WithPatch.
    static CodeLocationLabel startOfBranch32WithPatchOnRegister(CodeLocationDataLabel32 label)
    {
        const int rexBytes = 1;
        const int opcodeBytes = 1;
        const int immediateBytes = 4;
        const int totalBytes = rexBytes + opcodeBytes + immediateBytes;
        ASSERT(totalBytes >= maxJumpReplacementSize());
        return label.labelAtOffset(-totalBytes);
    }

    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr label)
    {
        return startOfBranchPtrWithPatchOnRegister(label);
    }

    static CodeLocationLabel startOfPatchableBranch32WithPatchOnAddress(CodeLocationDataLabel32 label)
    {
        return startOfBranch32WithPatchOnRegister(label);
    }

    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart, Address, void* initialValue)
    {
        X86Assembler::revertJumpTo_movq_i64r(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), scratchRegister);
    }

    static void revertJumpReplacementToPatchableBranch32WithPatch(CodeLocationLabel instructionStart, Address, int32_t initialValue)
    {
        X86Assembler::revertJumpTo_movl_i32r(instructionStart.executableAddress(), initialValue, scratchRegister);
    }

    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID, void* initialValue)
    {
        X86Assembler::revertJumpTo_movq_i64r(instructionStart.executableAddress(), reinterpret_cast<intptr_t>(initialValue), scratchRegister);
    }

private:
    friend class LinkBuffer;
    friend class RepatchBuffer;

    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        if (!call.isFlagSet(Call::Near))
            // Far call: patch the movabs immediate that precedes the
            // indirect call through r11.
            X86Assembler::linkPointer(code, call.m_label.labelAtOffset(-REPTACH_OFFSET_CALL_R11), function.value());
        else
            X86Assembler::linkCall(code, call.m_label, function.value());
    }

    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        X86Assembler::repatchPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation(), destination.executableAddress());
    }

    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        X86Assembler::repatchPointer(call.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11).dataLocation(), destination.executableAddress());
    }
};
872 #endif // ENABLE(ASSEMBLER)
874 #endif // MacroAssemblerX86_64_h