2 * Copyright (C) 2008 Apple Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 #ifndef MacroAssemblerX86_64_h
27 #define MacroAssemblerX86_64_h
29 #if ENABLE(ASSEMBLER) && CPU(X86_64)
31 #include "MacroAssemblerX86Common.h"
33 #define REPTACH_OFFSET_CALL_R11 3
37 class MacroAssemblerX86_64
: public MacroAssemblerX86Common
{
// Pointers are eight bytes wide on x86-64, so pointer-scaled indexing uses TimesEight.
static const Scale ScalePtr = TimesEight;

// Re-export the base-class overloads so the additional overloads declared in this
// class do not hide them (C++ name hiding across scopes).
using MacroAssemblerX86Common::add32;
using MacroAssemblerX86Common::and32;
using MacroAssemblerX86Common::branchAdd32;
using MacroAssemblerX86Common::or32;
using MacroAssemblerX86Common::sub32;
using MacroAssemblerX86Common::load32;
using MacroAssemblerX86Common::store32;
using MacroAssemblerX86Common::call;
using MacroAssemblerX86Common::jump;
using MacroAssemblerX86Common::addDouble;
using MacroAssemblerX86Common::loadDouble;
using MacroAssemblerX86Common::convertInt32ToDouble;
54 void add32(TrustedImm32 imm
, AbsoluteAddress address
)
56 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
57 add32(imm
, Address(scratchRegister
));
60 void and32(TrustedImm32 imm
, AbsoluteAddress address
)
62 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
63 and32(imm
, Address(scratchRegister
));
66 void or32(TrustedImm32 imm
, AbsoluteAddress address
)
68 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
69 or32(imm
, Address(scratchRegister
));
72 void sub32(TrustedImm32 imm
, AbsoluteAddress address
)
74 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
75 sub32(imm
, Address(scratchRegister
));
78 void load32(const void* address
, RegisterID dest
)
80 if (dest
== X86Registers::eax
)
81 m_assembler
.movl_mEAX(address
);
83 move(TrustedImmPtr(address
), dest
);
88 void addDouble(AbsoluteAddress address
, FPRegisterID dest
)
90 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
91 m_assembler
.addsd_mr(0, scratchRegister
, dest
);
94 void convertInt32ToDouble(TrustedImm32 imm
, FPRegisterID dest
)
96 move(imm
, scratchRegister
);
97 m_assembler
.cvtsi2sd_rr(scratchRegister
, dest
);
100 void store32(TrustedImm32 imm
, void* address
)
102 move(TrustedImmPtr(address
), scratchRegister
);
103 store32(imm
, scratchRegister
);
108 DataLabelPtr label
= moveWithPatch(TrustedImmPtr(0), scratchRegister
);
109 Call result
= Call(m_assembler
.call(scratchRegister
), Call::Linkable
);
110 ASSERT_UNUSED(label
, differenceBetween(label
, result
) == REPTACH_OFFSET_CALL_R11
);
114 // Address is a memory location containing the address to jump to
115 void jump(AbsoluteAddress address
)
117 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
118 jump(Address(scratchRegister
));
121 Call
tailRecursiveCall()
123 DataLabelPtr label
= moveWithPatch(TrustedImmPtr(0), scratchRegister
);
124 Jump newJump
= Jump(m_assembler
.jmp_r(scratchRegister
));
125 ASSERT_UNUSED(label
, differenceBetween(label
, newJump
) == REPTACH_OFFSET_CALL_R11
);
126 return Call::fromTailJump(newJump
);
129 Call
makeTailRecursiveCall(Jump oldJump
)
132 DataLabelPtr label
= moveWithPatch(TrustedImmPtr(0), scratchRegister
);
133 Jump newJump
= Jump(m_assembler
.jmp_r(scratchRegister
));
134 ASSERT_UNUSED(label
, differenceBetween(label
, newJump
) == REPTACH_OFFSET_CALL_R11
);
135 return Call::fromTailJump(newJump
);
139 void addPtr(RegisterID src
, RegisterID dest
)
141 m_assembler
.addq_rr(src
, dest
);
144 void addPtr(TrustedImm32 imm
, RegisterID srcDest
)
146 m_assembler
.addq_ir(imm
.m_value
, srcDest
);
149 void addPtr(TrustedImmPtr imm
, RegisterID dest
)
151 move(imm
, scratchRegister
);
152 m_assembler
.addq_rr(scratchRegister
, dest
);
155 void addPtr(TrustedImm32 imm
, RegisterID src
, RegisterID dest
)
157 m_assembler
.leaq_mr(imm
.m_value
, src
, dest
);
160 void addPtr(TrustedImm32 imm
, Address address
)
162 m_assembler
.addq_im(imm
.m_value
, address
.offset
, address
.base
);
165 void addPtr(TrustedImm32 imm
, AbsoluteAddress address
)
167 move(TrustedImmPtr(address
.m_ptr
), scratchRegister
);
168 addPtr(imm
, Address(scratchRegister
));
171 void add64(TrustedImm32 imm
, AbsoluteAddress address
)
173 addPtr(imm
, address
);
176 void andPtr(RegisterID src
, RegisterID dest
)
178 m_assembler
.andq_rr(src
, dest
);
181 void andPtr(TrustedImm32 imm
, RegisterID srcDest
)
183 m_assembler
.andq_ir(imm
.m_value
, srcDest
);
186 void orPtr(RegisterID src
, RegisterID dest
)
188 m_assembler
.orq_rr(src
, dest
);
191 void orPtr(TrustedImmPtr imm
, RegisterID dest
)
193 move(imm
, scratchRegister
);
194 m_assembler
.orq_rr(scratchRegister
, dest
);
197 void orPtr(TrustedImm32 imm
, RegisterID dest
)
199 m_assembler
.orq_ir(imm
.m_value
, dest
);
202 void orPtr(RegisterID op1
, RegisterID op2
, RegisterID dest
)
206 else if (op1
== dest
)
214 void orPtr(TrustedImm32 imm
, RegisterID src
, RegisterID dest
)
220 void rotateRightPtr(TrustedImm32 imm
, RegisterID srcDst
)
222 m_assembler
.rorq_i8r(imm
.m_value
, srcDst
);
225 void subPtr(RegisterID src
, RegisterID dest
)
227 m_assembler
.subq_rr(src
, dest
);
230 void subPtr(TrustedImm32 imm
, RegisterID dest
)
232 m_assembler
.subq_ir(imm
.m_value
, dest
);
235 void subPtr(TrustedImmPtr imm
, RegisterID dest
)
237 move(imm
, scratchRegister
);
238 m_assembler
.subq_rr(scratchRegister
, dest
);
241 void xorPtr(RegisterID src
, RegisterID dest
)
243 m_assembler
.xorq_rr(src
, dest
);
246 void xorPtr(RegisterID src
, Address dest
)
248 m_assembler
.xorq_rm(src
, dest
.offset
, dest
.base
);
251 void xorPtr(TrustedImm32 imm
, RegisterID srcDest
)
253 m_assembler
.xorq_ir(imm
.m_value
, srcDest
);
256 void loadPtr(ImplicitAddress address
, RegisterID dest
)
258 m_assembler
.movq_mr(address
.offset
, address
.base
, dest
);
261 void loadPtr(BaseIndex address
, RegisterID dest
)
263 m_assembler
.movq_mr(address
.offset
, address
.base
, address
.index
, address
.scale
, dest
);
266 void loadPtr(const void* address
, RegisterID dest
)
268 if (dest
== X86Registers::eax
)
269 m_assembler
.movq_mEAX(address
);
271 move(TrustedImmPtr(address
), dest
);
276 DataLabel32
loadPtrWithAddressOffsetPatch(Address address
, RegisterID dest
)
278 m_assembler
.movq_mr_disp32(address
.offset
, address
.base
, dest
);
279 return DataLabel32(this);
282 DataLabelCompact
loadPtrWithCompactAddressOffsetPatch(Address address
, RegisterID dest
)
284 m_assembler
.movq_mr_disp8(address
.offset
, address
.base
, dest
);
285 return DataLabelCompact(this);
288 void storePtr(RegisterID src
, ImplicitAddress address
)
290 m_assembler
.movq_rm(src
, address
.offset
, address
.base
);
293 void storePtr(RegisterID src
, BaseIndex address
)
295 m_assembler
.movq_rm(src
, address
.offset
, address
.base
, address
.index
, address
.scale
);
298 void storePtr(RegisterID src
, void* address
)
300 if (src
== X86Registers::eax
)
301 m_assembler
.movq_EAXm(address
);
303 move(TrustedImmPtr(address
), scratchRegister
);
304 storePtr(src
, scratchRegister
);
308 void storePtr(TrustedImmPtr imm
, ImplicitAddress address
)
310 move(imm
, scratchRegister
);
311 storePtr(scratchRegister
, address
);
314 void storePtr(TrustedImmPtr imm
, BaseIndex address
)
316 move(imm
, scratchRegister
);
317 m_assembler
.movq_rm(scratchRegister
, address
.offset
, address
.base
, address
.index
, address
.scale
);
320 DataLabel32
storePtrWithAddressOffsetPatch(RegisterID src
, Address address
)
322 m_assembler
.movq_rm_disp32(src
, address
.offset
, address
.base
);
323 return DataLabel32(this);
326 void movePtrToDouble(RegisterID src
, FPRegisterID dest
)
328 m_assembler
.movq_rr(src
, dest
);
331 void moveDoubleToPtr(FPRegisterID src
, RegisterID dest
)
333 m_assembler
.movq_rr(src
, dest
);
336 void comparePtr(RelationalCondition cond
, RegisterID left
, TrustedImm32 right
, RegisterID dest
)
338 if (((cond
== Equal
) || (cond
== NotEqual
)) && !right
.m_value
)
339 m_assembler
.testq_rr(left
, left
);
341 m_assembler
.cmpq_ir(right
.m_value
, left
);
342 m_assembler
.setCC_r(x86Condition(cond
), dest
);
343 m_assembler
.movzbl_rr(dest
, dest
);
346 void comparePtr(RelationalCondition cond
, RegisterID left
, RegisterID right
, RegisterID dest
)
348 m_assembler
.cmpq_rr(right
, left
);
349 m_assembler
.setCC_r(x86Condition(cond
), dest
);
350 m_assembler
.movzbl_rr(dest
, dest
);
353 Jump
branchAdd32(ResultCondition cond
, TrustedImm32 src
, AbsoluteAddress dest
)
355 move(TrustedImmPtr(dest
.m_ptr
), scratchRegister
);
356 add32(src
, Address(scratchRegister
));
357 return Jump(m_assembler
.jCC(x86Condition(cond
)));
360 Jump
branchPtr(RelationalCondition cond
, RegisterID left
, RegisterID right
)
362 m_assembler
.cmpq_rr(right
, left
);
363 return Jump(m_assembler
.jCC(x86Condition(cond
)));
366 Jump
branchPtr(RelationalCondition cond
, RegisterID left
, TrustedImmPtr right
)
368 if (((cond
== Equal
) || (cond
== NotEqual
)) && !right
.m_value
) {
369 m_assembler
.testq_rr(left
, left
);
370 return Jump(m_assembler
.jCC(x86Condition(cond
)));
372 move(right
, scratchRegister
);
373 return branchPtr(cond
, left
, scratchRegister
);
376 Jump
branchPtr(RelationalCondition cond
, RegisterID left
, Address right
)
378 m_assembler
.cmpq_mr(right
.offset
, right
.base
, left
);
379 return Jump(m_assembler
.jCC(x86Condition(cond
)));
382 Jump
branchPtr(RelationalCondition cond
, AbsoluteAddress left
, RegisterID right
)
384 move(TrustedImmPtr(left
.m_ptr
), scratchRegister
);
385 return branchPtr(cond
, Address(scratchRegister
), right
);
388 Jump
branchPtr(RelationalCondition cond
, Address left
, RegisterID right
)
390 m_assembler
.cmpq_rm(right
, left
.offset
, left
.base
);
391 return Jump(m_assembler
.jCC(x86Condition(cond
)));
394 Jump
branchPtr(RelationalCondition cond
, Address left
, TrustedImmPtr right
)
396 move(right
, scratchRegister
);
397 return branchPtr(cond
, left
, scratchRegister
);
400 Jump
branchTestPtr(ResultCondition cond
, RegisterID reg
, RegisterID mask
)
402 m_assembler
.testq_rr(reg
, mask
);
403 return Jump(m_assembler
.jCC(x86Condition(cond
)));
406 Jump
branchTestPtr(ResultCondition cond
, RegisterID reg
, TrustedImm32 mask
= TrustedImm32(-1))
408 // if we are only interested in the low seven bits, this can be tested with a testb
409 if (mask
.m_value
== -1)
410 m_assembler
.testq_rr(reg
, reg
);
411 else if ((mask
.m_value
& ~0x7f) == 0)
412 m_assembler
.testb_i8r(mask
.m_value
, reg
);
414 m_assembler
.testq_i32r(mask
.m_value
, reg
);
415 return Jump(m_assembler
.jCC(x86Condition(cond
)));
418 void testPtr(ResultCondition cond
, RegisterID reg
, TrustedImm32 mask
, RegisterID dest
)
420 if (mask
.m_value
== -1)
421 m_assembler
.testq_rr(reg
, reg
);
422 else if ((mask
.m_value
& ~0x7f) == 0)
423 m_assembler
.testb_i8r(mask
.m_value
, reg
);
425 m_assembler
.testq_i32r(mask
.m_value
, reg
);
426 set32(x86Condition(cond
), dest
);
429 void testPtr(ResultCondition cond
, RegisterID reg
, RegisterID mask
, RegisterID dest
)
431 m_assembler
.testq_rr(reg
, mask
);
432 set32(x86Condition(cond
), dest
);
435 Jump
branchTestPtr(ResultCondition cond
, AbsoluteAddress address
, TrustedImm32 mask
= TrustedImm32(-1))
437 loadPtr(address
.m_ptr
, scratchRegister
);
438 return branchTestPtr(cond
, scratchRegister
, mask
);
441 Jump
branchTestPtr(ResultCondition cond
, Address address
, TrustedImm32 mask
= TrustedImm32(-1))
443 if (mask
.m_value
== -1)
444 m_assembler
.cmpq_im(0, address
.offset
, address
.base
);
446 m_assembler
.testq_i32m(mask
.m_value
, address
.offset
, address
.base
);
447 return Jump(m_assembler
.jCC(x86Condition(cond
)));
450 Jump
branchTestPtr(ResultCondition cond
, BaseIndex address
, TrustedImm32 mask
= TrustedImm32(-1))
452 if (mask
.m_value
== -1)
453 m_assembler
.cmpq_im(0, address
.offset
, address
.base
, address
.index
, address
.scale
);
455 m_assembler
.testq_i32m(mask
.m_value
, address
.offset
, address
.base
, address
.index
, address
.scale
);
456 return Jump(m_assembler
.jCC(x86Condition(cond
)));
460 Jump
branchAddPtr(ResultCondition cond
, TrustedImm32 imm
, RegisterID dest
)
463 return Jump(m_assembler
.jCC(x86Condition(cond
)));
466 Jump
branchAddPtr(ResultCondition cond
, RegisterID src
, RegisterID dest
)
469 return Jump(m_assembler
.jCC(x86Condition(cond
)));
472 Jump
branchSubPtr(ResultCondition cond
, TrustedImm32 imm
, RegisterID dest
)
475 return Jump(m_assembler
.jCC(x86Condition(cond
)));
478 Jump
branchSubPtr(ResultCondition cond
, RegisterID src1
, TrustedImm32 src2
, RegisterID dest
)
481 return branchSubPtr(cond
, src2
, dest
);
484 DataLabelPtr
moveWithPatch(TrustedImmPtr initialValue
, RegisterID dest
)
486 m_assembler
.movq_i64r(initialValue
.asIntptr(), dest
);
487 return DataLabelPtr(this);
490 Jump
branchPtrWithPatch(RelationalCondition cond
, RegisterID left
, DataLabelPtr
& dataLabel
, TrustedImmPtr initialRightValue
= TrustedImmPtr(0))
492 dataLabel
= moveWithPatch(initialRightValue
, scratchRegister
);
493 return branchPtr(cond
, left
, scratchRegister
);
496 Jump
branchPtrWithPatch(RelationalCondition cond
, Address left
, DataLabelPtr
& dataLabel
, TrustedImmPtr initialRightValue
= TrustedImmPtr(0))
498 dataLabel
= moveWithPatch(initialRightValue
, scratchRegister
);
499 return branchPtr(cond
, left
, scratchRegister
);
502 DataLabelPtr
storePtrWithPatch(TrustedImmPtr initialValue
, ImplicitAddress address
)
504 DataLabelPtr label
= moveWithPatch(initialValue
, scratchRegister
);
505 storePtr(scratchRegister
, address
);
509 using MacroAssemblerX86Common::branchTest8
;
510 Jump
branchTest8(ResultCondition cond
, ExtendedAddress address
, TrustedImm32 mask
= TrustedImm32(-1))
512 TrustedImmPtr
addr(reinterpret_cast<void*>(address
.offset
));
513 MacroAssemblerX86Common::move(addr
, scratchRegister
);
514 return MacroAssemblerX86Common::branchTest8(cond
, BaseIndex(scratchRegister
, address
.base
, TimesOne
), mask
);
// Feature predicates: x86-64 guarantees SSE2, so all FP operations are available.
static bool supportsFloatingPoint() { return true; }
// See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
static bool supportsFloatingPointTruncate() { return true; }
static bool supportsFloatingPointSqrt() { return true; }
static bool supportsFloatingPointAbs() { return true; }
523 static FunctionPtr
readCallTarget(CodeLocationCall call
)
525 return FunctionPtr(X86Assembler::readPointer(call
.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11
).dataLocation()));
// Expose the scratch register so value-blinding code can reuse it as a temporary.
static RegisterID scratchRegisterForBlinding() { return scratchRegister; }
// LinkBuffer and RepatchBuffer call the linkCall/repatchCall helpers below.
friend class LinkBuffer;
friend class RepatchBuffer;
534 static void linkCall(void* code
, Call call
, FunctionPtr function
)
536 if (!call
.isFlagSet(Call::Near
))
537 X86Assembler::linkPointer(code
, call
.m_label
.labelAtOffset(-REPTACH_OFFSET_CALL_R11
), function
.value());
539 X86Assembler::linkCall(code
, call
.m_label
, function
.value());
542 static void repatchCall(CodeLocationCall call
, CodeLocationLabel destination
)
544 X86Assembler::repatchPointer(call
.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11
).dataLocation(), destination
.executableAddress());
547 static void repatchCall(CodeLocationCall call
, FunctionPtr destination
)
549 X86Assembler::repatchPointer(call
.dataLabelPtrAtOffset(-REPTACH_OFFSET_CALL_R11
).dataLocation(), destination
.executableAddress());
556 #endif // ENABLE(ASSEMBLER)
558 #endif // MacroAssemblerX86_64_h