/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef MacroAssemblerX86_h
#define MacroAssemblerX86_h

#if ENABLE(ASSEMBLER) && CPU(X86)

#include "MacroAssemblerX86Common.h"

namespace JSC {

class MacroAssemblerX86 : public MacroAssemblerX86Common {
public:
    // Pointers are 4 bytes on x86-32, so pointer-scaled indexing uses a scale of four.
    static const Scale ScalePtr = TimesFour;

    using MacroAssemblerX86Common::add32;
    using MacroAssemblerX86Common::and32;
    using MacroAssemblerX86Common::branchAdd32;
    using MacroAssemblerX86Common::branchSub32;
    using MacroAssemblerX86Common::sub32;
    using MacroAssemblerX86Common::or32;
    using MacroAssemblerX86Common::load32;
    using MacroAssemblerX86Common::store32;
    using MacroAssemblerX86Common::branch32;
    using MacroAssemblerX86Common::call;
    using MacroAssemblerX86Common::jump;
    using MacroAssemblerX86Common::addDouble;
    using MacroAssemblerX86Common::loadDouble;
    using MacroAssemblerX86Common::storeDouble;
    using MacroAssemblerX86Common::convertInt32ToDouble;

    void add32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        m_assembler.leal_mr(imm.m_value, src, dest);
    }

    void add32(TrustedImm32 imm, AbsoluteAddress address)
    {
        m_assembler.addl_im(imm.m_value, address.m_ptr);
    }
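
    // 64-bit add of a sign-extended 32-bit immediate to a value in memory:
    // addl adds the low word and sets the carry flag; adcl then folds the carry
    // and the immediate's sign extension (imm.m_value >> 31) into the high word
    // at address + 4.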
    void add64(TrustedImm32 imm, AbsoluteAddress address)
    {
        m_assembler.addl_im(imm.m_value, address.m_ptr);
        m_assembler.adcl_im(imm.m_value >> 31, reinterpret_cast<const char*>(address.m_ptr) + sizeof(int32_t));
    }

    void and32(TrustedImm32 imm, AbsoluteAddress address)
    {
        m_assembler.andl_im(imm.m_value, address.m_ptr);
    }

    void or32(TrustedImm32 imm, AbsoluteAddress address)
    {
        m_assembler.orl_im(imm.m_value, address.m_ptr);
    }

    void sub32(TrustedImm32 imm, AbsoluteAddress address)
    {
        m_assembler.subl_im(imm.m_value, address.m_ptr);
    }

    void load32(const void* address, RegisterID dest)
    {
        m_assembler.movl_mr(address, dest);
    }

    void addDouble(AbsoluteAddress address, FPRegisterID dest)
    {
        m_assembler.addsd_mr(address.m_ptr, dest);
    }

    void storeDouble(FPRegisterID src, const void* address)
    {
        ASSERT(isSSE2Present());
        m_assembler.movsd_rm(src, address);
    }

    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
    {
        m_assembler.cvtsi2sd_mr(src.m_ptr, dest);
    }

    void store32(TrustedImm32 imm, void* address)
    {
        m_assembler.movl_i32m(imm.m_value, address);
    }

    void store32(RegisterID src, void* address)
    {
        m_assembler.movl_rm(src, address);
    }
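
    // The following branch variants perform the arithmetic directly on the
    // absolute memory location and then branch on the flags that operation set.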
    Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
    {
        m_assembler.addl_im(imm.m_value, dest.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchSub32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
    {
        m_assembler.subl_im(imm.m_value, dest.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch32(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
    {
        m_assembler.cmpl_rm(right, left.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch32(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
    {
        m_assembler.cmpl_im(right.m_value, left.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }
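
    // Emits a near call and marks it Linkable so LinkBuffer can bind it to its
    // target when the code is finalized.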
    Call call()
    {
        return Call(m_assembler.call(), Call::Linkable);
    }

    // Address is a memory location containing the address to jump to
    void jump(AbsoluteAddress address)
    {
        m_assembler.jmp_m(address.m_ptr);
    }

    Call tailRecursiveCall()
    {
        return Call::fromTailJump(jump());
    }

    Call makeTailRecursiveCall(Jump oldJump)
    {
        return Call::fromTailJump(oldJump);
    }
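
    // The *WithPatch operations always emit the pointer as a full 32-bit immediate
    // (the compares use the force32 encodings), and hand back a DataLabelPtr so the
    // immediate can be located and rewritten after the code has been generated.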
    DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
    {
        m_assembler.movl_i32r(initialValue.asIntptr(), dest);
        return DataLabelPtr(this);
    }

    Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        m_assembler.cmpl_ir_force32(initialRightValue.asIntptr(), left);
        dataLabel = DataLabelPtr(this);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        m_assembler.cmpl_im_force32(initialRightValue.asIntptr(), left.offset, left.base);
        dataLabel = DataLabelPtr(this);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
    {
        m_assembler.movl_i32m(initialValue.asIntptr(), address.offset, address.base);
        return DataLabelPtr(this);
    }

    static bool supportsFloatingPoint() { return isSSE2Present(); }
    // See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
    static bool supportsFloatingPointTruncate() { return isSSE2Present(); }
    static bool supportsFloatingPointSqrt() { return isSSE2Present(); }
    static bool supportsFloatingPointAbs() { return isSSE2Present(); }
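
    // A near call ends with a 32-bit relative displacement; dataLocation() points
    // just past it, so the target is that address plus the displacement read from
    // the preceding four bytes.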
    static FunctionPtr readCallTarget(CodeLocationCall call)
    {
        intptr_t offset = reinterpret_cast<int32_t*>(call.dataLocation())[-1];
        return FunctionPtr(reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(call.dataLocation()) + offset));
    }

private:
    friend class LinkBuffer;
    friend class RepatchBuffer;
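
    // LinkBuffer and RepatchBuffer use these hooks to bind a Linkable call to its
    // initial target and to re-point an already linked call.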
    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        X86Assembler::linkCall(code, call.m_label, function.value());
    }

    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }

    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }
};

} // namespace JSC

#endif // ENABLE(ASSEMBLER)

#endif // MacroAssemblerX86_h