/*
 * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef MacroAssemblerX86_64_h
#define MacroAssemblerX86_64_h

#if ENABLE(ASSEMBLER) && CPU(X86_64)

#include "MacroAssemblerX86Common.h"

// Byte distance from the start of the patchable movq-immediate that loads the
// call/jump target into r11 to the call/jmp instruction that follows it.
// NOTE(review): "REPTACH" is a long-standing typo for "REPATCH" in this file;
// kept as-is because the name is referenced throughout the class below.
#define REPTACH_OFFSET_CALL_R11 3
37 class MacroAssemblerX86_64
: public MacroAssemblerX86Common
{
39 static const Scale ScalePtr
= TimesEight
;
41 using MacroAssemblerX86Common::add32
;
42 using MacroAssemblerX86Common::and32
;
43 using MacroAssemblerX86Common::branchAdd32
;
44 using MacroAssemblerX86Common::or32
;
45 using MacroAssemblerX86Common::sub32
;
46 using MacroAssemblerX86Common::load32
;
47 using MacroAssemblerX86Common::store32
;
48 using MacroAssemblerX86Common::store8
;
49 using MacroAssemblerX86Common::call
;
50 using MacroAssemblerX86Common::jump
;
51 using MacroAssemblerX86Common::addDouble
;
52 using MacroAssemblerX86Common::loadDouble
;
53 using MacroAssemblerX86Common::convertInt32ToDouble
;
55 void add32(TrustedImm32 imm
, AbsoluteAddress address
)
57 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
58 add32(imm
, Address(scratchRegister
));
61 void and32(TrustedImm32 imm
, AbsoluteAddress address
)
63 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
64 and32(imm
, Address(scratchRegister
));
67 void add32(AbsoluteAddress address
, RegisterID dest
)
69 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
70 add32(Address(scratchRegister
), dest
);
73 void or32(TrustedImm32 imm
, AbsoluteAddress address
)
75 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
76 or32(imm
, Address(scratchRegister
));
79 void or32(RegisterID reg
, AbsoluteAddress address
)
81 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
82 or32(reg
, Address(scratchRegister
));
85 void sub32(TrustedImm32 imm
, AbsoluteAddress address
)
87 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
88 sub32(imm
, Address(scratchRegister
));
91 void load32(const void* address
, RegisterID dest
)
93 if (dest
== X86Registers::eax
)
94 m_assembler
.movl_mEAX(address
);
96 move(TrustedImmPtr(address
), dest
);
101 void addDouble(AbsoluteAddress address
, FPRegisterID dest
)
103 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
104 m_assembler
.addsd_mr(0, scratchRegister
, dest
);
107 void convertInt32ToDouble(TrustedImm32 imm
, FPRegisterID dest
)
109 move(imm
, scratchRegister
);
110 m_assembler
.cvtsi2sd_rr(scratchRegister
, dest
);
113 void store32(TrustedImm32 imm
, void* address
)
115 move(TrustedImmPtr(address
), scratchRegister
);
116 store32(imm
, scratchRegister
);
119 void store8(TrustedImm32 imm
, void* address
)
121 move(TrustedImmPtr(address
), scratchRegister
);
122 store8(imm
, Address(scratchRegister
));
127 DataLabelPtr label
= moveWithPatch(TrustedImmPtr(0), scratchRegister
);
128 Call result
= Call(m_assembler
.call(scratchRegister
), Call::Linkable
);
129 ASSERT_UNUSED(label
, differenceBetween(label
, result
) == REPTACH_OFFSET_CALL_R11
);
133 // Address is a memory location containing the address to jump to
134 void jump(AbsoluteAddress address
)
136 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
137 jump(Address(scratchRegister
));
140 Call
tailRecursiveCall()
142 DataLabelPtr label
= moveWithPatch(TrustedImmPtr(0), scratchRegister
);
143 Jump newJump
= Jump(m_assembler
.jmp_r(scratchRegister
));
144 ASSERT_UNUSED(label
, differenceBetween(label
, newJump
) == REPTACH_OFFSET_CALL_R11
);
145 return Call::fromTailJump(newJump
);
148 Call
makeTailRecursiveCall(Jump oldJump
)
151 DataLabelPtr label
= moveWithPatch(TrustedImmPtr(0), scratchRegister
);
152 Jump newJump
= Jump(m_assembler
.jmp_r(scratchRegister
));
153 ASSERT_UNUSED(label
, differenceBetween(label
, newJump
) == REPTACH_OFFSET_CALL_R11
);
154 return Call::fromTailJump(newJump
);
157 Jump
branchAdd32(ResultCondition cond
, TrustedImm32 src
, AbsoluteAddress dest
)
159 move(TrustedImmPtr(dest
.m_ptr
), scratchRegister
);
160 add32(src
, Address(scratchRegister
));
161 return Jump(m_assembler
.jCC(x86Condition(cond
)));
164 void add64(RegisterID src
, RegisterID dest
)
166 m_assembler
.addq_rr(src
, dest
);
169 void add64(Address src
, RegisterID dest
)
171 m_assembler
.addq_mr(src
.offset
, src
.base
, dest
);
174 void add64(AbsoluteAddress src
, RegisterID dest
)
176 move(TrustedImmPtr(src
.m_ptr
), scratchRegister
);
177 add64(Address(scratchRegister
), dest
);
180 void add64(TrustedImm32 imm
, RegisterID srcDest
)
182 m_assembler
.addq_ir(imm
.m_value
, srcDest
);
185 void add64(TrustedImm64 imm
, RegisterID dest
)
187 move(imm
, scratchRegister
);
188 add64(scratchRegister
, dest
);
191 void add64(TrustedImm32 imm
, RegisterID src
, RegisterID dest
)
193 m_assembler
.leaq_mr(imm
.m_value
, src
, dest
);
196 void add64(TrustedImm32 imm
, Address address
)
198 m_assembler
.addq_im(imm
.m_value
, address
.offset
, address
.base
);
201 void add64(TrustedImm32 imm
, AbsoluteAddress address
)
203 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
204 add64(imm
, Address(scratchRegister
));
207 void and64(RegisterID src
, RegisterID dest
)
209 m_assembler
.andq_rr(src
, dest
);
212 void and64(TrustedImm32 imm
, RegisterID srcDest
)
214 m_assembler
.andq_ir(imm
.m_value
, srcDest
);
217 void neg64(RegisterID dest
)
219 m_assembler
.negq_r(dest
);
222 void or64(RegisterID src
, RegisterID dest
)
224 m_assembler
.orq_rr(src
, dest
);
227 void or64(TrustedImm64 imm
, RegisterID dest
)
229 move(imm
, scratchRegister
);
230 or64(scratchRegister
, dest
);
233 void or64(TrustedImm32 imm
, RegisterID dest
)
235 m_assembler
.orq_ir(imm
.m_value
, dest
);
238 void or64(RegisterID op1
, RegisterID op2
, RegisterID dest
)
242 else if (op1
== dest
)
250 void or64(TrustedImm32 imm
, RegisterID src
, RegisterID dest
)
256 void rotateRight64(TrustedImm32 imm
, RegisterID srcDst
)
258 m_assembler
.rorq_i8r(imm
.m_value
, srcDst
);
261 void sub64(RegisterID src
, RegisterID dest
)
263 m_assembler
.subq_rr(src
, dest
);
266 void sub64(TrustedImm32 imm
, RegisterID dest
)
268 m_assembler
.subq_ir(imm
.m_value
, dest
);
271 void sub64(TrustedImm64 imm
, RegisterID dest
)
273 move(imm
, scratchRegister
);
274 sub64(scratchRegister
, dest
);
277 void xor64(RegisterID src
, RegisterID dest
)
279 m_assembler
.xorq_rr(src
, dest
);
282 void xor64(RegisterID src
, Address dest
)
284 m_assembler
.xorq_rm(src
, dest
.offset
, dest
.base
);
287 void xor64(TrustedImm32 imm
, RegisterID srcDest
)
289 m_assembler
.xorq_ir(imm
.m_value
, srcDest
);
292 void load64(ImplicitAddress address
, RegisterID dest
)
294 m_assembler
.movq_mr(address
.offset
, address
.base
, dest
);
297 void load64(BaseIndex address
, RegisterID dest
)
299 m_assembler
.movq_mr(address
.offset
, address
.base
, address
.index
, address
.scale
, dest
);
302 void load64(const void* address
, RegisterID dest
)
304 if (dest
== X86Registers::eax
)
305 m_assembler
.movq_mEAX(address
);
307 move(TrustedImmPtr(address
), dest
);
312 DataLabel32
load64WithAddressOffsetPatch(Address address
, RegisterID dest
)
315 m_assembler
.movq_mr_disp32(address
.offset
, address
.base
, dest
);
316 return DataLabel32(this);
319 DataLabelCompact
load64WithCompactAddressOffsetPatch(Address address
, RegisterID dest
)
322 m_assembler
.movq_mr_disp8(address
.offset
, address
.base
, dest
);
323 return DataLabelCompact(this);
326 void store64(RegisterID src
, ImplicitAddress address
)
328 m_assembler
.movq_rm(src
, address
.offset
, address
.base
);
331 void store64(RegisterID src
, BaseIndex address
)
333 m_assembler
.movq_rm(src
, address
.offset
, address
.base
, address
.index
, address
.scale
);
336 void store64(RegisterID src
, void* address
)
338 if (src
== X86Registers::eax
)
339 m_assembler
.movq_EAXm(address
);
341 move(TrustedImmPtr(address
), scratchRegister
);
342 store64(src
, scratchRegister
);
346 void store64(TrustedImm64 imm
, ImplicitAddress address
)
348 move(imm
, scratchRegister
);
349 store64(scratchRegister
, address
);
352 void store64(TrustedImm64 imm
, BaseIndex address
)
354 move(imm
, scratchRegister
);
355 m_assembler
.movq_rm(scratchRegister
, address
.offset
, address
.base
, address
.index
, address
.scale
);
358 DataLabel32
store64WithAddressOffsetPatch(RegisterID src
, Address address
)
361 m_assembler
.movq_rm_disp32(src
, address
.offset
, address
.base
);
362 return DataLabel32(this);
365 void move64ToDouble(RegisterID src
, FPRegisterID dest
)
367 m_assembler
.movq_rr(src
, dest
);
370 void moveDoubleTo64(FPRegisterID src
, RegisterID dest
)
372 m_assembler
.movq_rr(src
, dest
);
375 void compare64(RelationalCondition cond
, RegisterID left
, TrustedImm32 right
, RegisterID dest
)
377 if (((cond
== Equal
) || (cond
== NotEqual
)) && !right
.m_value
)
378 m_assembler
.testq_rr(left
, left
);
380 m_assembler
.cmpq_ir(right
.m_value
, left
);
381 m_assembler
.setCC_r(x86Condition(cond
), dest
);
382 m_assembler
.movzbl_rr(dest
, dest
);
385 void compare64(RelationalCondition cond
, RegisterID left
, RegisterID right
, RegisterID dest
)
387 m_assembler
.cmpq_rr(right
, left
);
388 m_assembler
.setCC_r(x86Condition(cond
), dest
);
389 m_assembler
.movzbl_rr(dest
, dest
);
392 Jump
branch64(RelationalCondition cond
, RegisterID left
, RegisterID right
)
394 m_assembler
.cmpq_rr(right
, left
);
395 return Jump(m_assembler
.jCC(x86Condition(cond
)));
398 Jump
branch64(RelationalCondition cond
, RegisterID left
, TrustedImm64 right
)
400 if (((cond
== Equal
) || (cond
== NotEqual
)) && !right
.m_value
) {
401 m_assembler
.testq_rr(left
, left
);
402 return Jump(m_assembler
.jCC(x86Condition(cond
)));
404 move(right
, scratchRegister
);
405 return branch64(cond
, left
, scratchRegister
);
408 Jump
branch64(RelationalCondition cond
, RegisterID left
, Address right
)
410 m_assembler
.cmpq_mr(right
.offset
, right
.base
, left
);
411 return Jump(m_assembler
.jCC(x86Condition(cond
)));
414 Jump
branch64(RelationalCondition cond
, AbsoluteAddress left
, RegisterID right
)
416 move(TrustedImmPtr(left
.m_ptr
), scratchRegister
);
417 return branch64(cond
, Address(scratchRegister
), right
);
420 Jump
branch64(RelationalCondition cond
, Address left
, RegisterID right
)
422 m_assembler
.cmpq_rm(right
, left
.offset
, left
.base
);
423 return Jump(m_assembler
.jCC(x86Condition(cond
)));
426 Jump
branch64(RelationalCondition cond
, Address left
, TrustedImm64 right
)
428 move(right
, scratchRegister
);
429 return branch64(cond
, left
, scratchRegister
);
432 Jump
branchTest64(ResultCondition cond
, RegisterID reg
, RegisterID mask
)
434 m_assembler
.testq_rr(reg
, mask
);
435 return Jump(m_assembler
.jCC(x86Condition(cond
)));
438 Jump
branchTest64(ResultCondition cond
, RegisterID reg
, TrustedImm32 mask
= TrustedImm32(-1))
440 // if we are only interested in the low seven bits, this can be tested with a testb
441 if (mask
.m_value
== -1)
442 m_assembler
.testq_rr(reg
, reg
);
443 else if ((mask
.m_value
& ~0x7f) == 0)
444 m_assembler
.testb_i8r(mask
.m_value
, reg
);
446 m_assembler
.testq_i32r(mask
.m_value
, reg
);
447 return Jump(m_assembler
.jCC(x86Condition(cond
)));
450 void test64(ResultCondition cond
, RegisterID reg
, TrustedImm32 mask
, RegisterID dest
)
452 if (mask
.m_value
== -1)
453 m_assembler
.testq_rr(reg
, reg
);
454 else if ((mask
.m_value
& ~0x7f) == 0)
455 m_assembler
.testb_i8r(mask
.m_value
, reg
);
457 m_assembler
.testq_i32r(mask
.m_value
, reg
);
458 set32(x86Condition(cond
), dest
);
461 void test64(ResultCondition cond
, RegisterID reg
, RegisterID mask
, RegisterID dest
)
463 m_assembler
.testq_rr(reg
, mask
);
464 set32(x86Condition(cond
), dest
);
467 Jump
branchTest64(ResultCondition cond
, AbsoluteAddress address
, TrustedImm32 mask
= TrustedImm32(-1))
469 load64(address
.m_ptr
, scratchRegister
);
470 return branchTest64(cond
, scratchRegister
, mask
);
473 Jump
branchTest64(ResultCondition cond
, Address address
, TrustedImm32 mask
= TrustedImm32(-1))
475 if (mask
.m_value
== -1)
476 m_assembler
.cmpq_im(0, address
.offset
, address
.base
);
478 m_assembler
.testq_i32m(mask
.m_value
, address
.offset
, address
.base
);
479 return Jump(m_assembler
.jCC(x86Condition(cond
)));
482 Jump
branchTest64(ResultCondition cond
, Address address
, RegisterID reg
)
484 m_assembler
.testq_rm(reg
, address
.offset
, address
.base
);
485 return Jump(m_assembler
.jCC(x86Condition(cond
)));
488 Jump
branchTest64(ResultCondition cond
, BaseIndex address
, TrustedImm32 mask
= TrustedImm32(-1))
490 if (mask
.m_value
== -1)
491 m_assembler
.cmpq_im(0, address
.offset
, address
.base
, address
.index
, address
.scale
);
493 m_assembler
.testq_i32m(mask
.m_value
, address
.offset
, address
.base
, address
.index
, address
.scale
);
494 return Jump(m_assembler
.jCC(x86Condition(cond
)));
498 Jump
branchAdd64(ResultCondition cond
, TrustedImm32 imm
, RegisterID dest
)
501 return Jump(m_assembler
.jCC(x86Condition(cond
)));
504 Jump
branchAdd64(ResultCondition cond
, RegisterID src
, RegisterID dest
)
507 return Jump(m_assembler
.jCC(x86Condition(cond
)));
510 Jump
branchSub64(ResultCondition cond
, TrustedImm32 imm
, RegisterID dest
)
513 return Jump(m_assembler
.jCC(x86Condition(cond
)));
516 Jump
branchSub64(ResultCondition cond
, RegisterID src
, RegisterID dest
)
519 return Jump(m_assembler
.jCC(x86Condition(cond
)));
522 Jump
branchSub64(ResultCondition cond
, RegisterID src1
, TrustedImm32 src2
, RegisterID dest
)
525 return branchSub64(cond
, src2
, dest
);
528 ConvertibleLoadLabel
convertibleLoadPtr(Address address
, RegisterID dest
)
530 ConvertibleLoadLabel result
= ConvertibleLoadLabel(this);
531 m_assembler
.movq_mr(address
.offset
, address
.base
, dest
);
535 DataLabelPtr
moveWithPatch(TrustedImmPtr initialValue
, RegisterID dest
)
538 m_assembler
.movq_i64r(initialValue
.asIntptr(), dest
);
539 return DataLabelPtr(this);
542 Jump
branchPtrWithPatch(RelationalCondition cond
, RegisterID left
, DataLabelPtr
& dataLabel
, TrustedImmPtr initialRightValue
= TrustedImmPtr(0))
544 dataLabel
= moveWithPatch(initialRightValue
, scratchRegister
);
545 return branch64(cond
, left
, scratchRegister
);
548 Jump
branchPtrWithPatch(RelationalCondition cond
, Address left
, DataLabelPtr
& dataLabel
, TrustedImmPtr initialRightValue
= TrustedImmPtr(0))
550 dataLabel
= moveWithPatch(initialRightValue
, scratchRegister
);
551 return branch64(cond
, left
, scratchRegister
);
554 DataLabelPtr
storePtrWithPatch(TrustedImmPtr initialValue
, ImplicitAddress address
)
556 DataLabelPtr label
= moveWithPatch(initialValue
, scratchRegister
);
557 store64(scratchRegister
, address
);
561 using MacroAssemblerX86Common::branchTest8
;
562 Jump
branchTest8(ResultCondition cond
, ExtendedAddress address
, TrustedImm32 mask
= TrustedImm32(-1))
564 TrustedImmPtr
addr(reinterpret_cast<void*>(address
.offset
));
565 MacroAssemblerX86Common::move(addr
, scratchRegister
);
566 return MacroAssemblerX86Common::branchTest8(cond
, BaseIndex(scratchRegister
, address
.base
, TimesOne
), mask
);
569 Jump
branchTest8(ResultCondition cond
, AbsoluteAddress address
, TrustedImm32 mask
= TrustedImm32(-1))
571 MacroAssemblerX86Common::move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
572 return MacroAssemblerX86Common::branchTest8(cond
, Address(scratchRegister
), mask
);
575 static bool supportsFloatingPoint() { return true; }
576 // See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
577 static bool supportsFloatingPointTruncate() { return true; }
578 static bool supportsFloatingPointSqrt() { return true; }
579 static bool supportsFloatingPointAbs() { return true; }
581 static FunctionPtr
readCallTarget(CodeLocationCall call
)
583 return FunctionPtr(X86Assembler::readPointer(call
.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11
).dataLocation()));
586 static RegisterID
scratchRegisterForBlinding() { return scratchRegister
; }
588 static bool canJumpReplacePatchableBranchPtrWithPatch() { return true; }
590 static CodeLocationLabel
startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label
)
592 const int rexBytes
= 1;
593 const int opcodeBytes
= 1;
594 const int immediateBytes
= 8;
595 const int totalBytes
= rexBytes
+ opcodeBytes
+ immediateBytes
;
596 ASSERT(totalBytes
>= maxJumpReplacementSize());
597 return label
.labelAtOffset(-totalBytes
);
600 static CodeLocationLabel
startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr label
)
602 return startOfBranchPtrWithPatchOnRegister(label
);
605 static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel instructionStart
, Address
, void* initialValue
)
607 X86Assembler::revertJumpTo_movq_i64r(instructionStart
.executableAddress(), reinterpret_cast<intptr_t>(initialValue
), scratchRegister
);
610 static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart
, RegisterID
, void* initialValue
)
612 X86Assembler::revertJumpTo_movq_i64r(instructionStart
.executableAddress(), reinterpret_cast<intptr_t>(initialValue
), scratchRegister
);
616 friend class LinkBuffer
;
617 friend class RepatchBuffer
;
619 static void linkCall(void* code
, Call call
, FunctionPtr function
)
621 if (!call
.isFlagSet(Call::Near
))
622 X86Assembler::linkPointer(code
, call
.m_label
.labelAtOffset(-REPTACH_OFFSET_CALL_R11
), function
.value());
624 X86Assembler::linkCall(code
, call
.m_label
, function
.value());
627 static void repatchCall(CodeLocationCall call
, CodeLocationLabel destination
)
629 X86Assembler::repatchPointer(call
.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11
).dataLocation(), destination
.executableAddress());
632 static void repatchCall(CodeLocationCall call
, FunctionPtr destination
)
634 X86Assembler::repatchPointer(call
.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11
).dataLocation(), destination
.executableAddress());
#endif // ENABLE(ASSEMBLER)

#endif // MacroAssemblerX86_64_h