/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef MacroAssemblerX86_h
#define MacroAssemblerX86_h

#if ENABLE(ASSEMBLER) && CPU(X86)

#include "MacroAssemblerX86Common.h"

namespace JSC {

class MacroAssemblerX86 : public MacroAssemblerX86Common {
public:
    MacroAssemblerX86()
        : m_isSSE2Present(isSSE2Present())
    {
    }
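
    // Pointers are four bytes on 32-bit x86, so pointer-scaled indexing uses TimesFour.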
    static const Scale ScalePtr = TimesFour;

    using MacroAssemblerX86Common::add32;
    using MacroAssemblerX86Common::and32;
    using MacroAssemblerX86Common::sub32;
    using MacroAssemblerX86Common::or32;
    using MacroAssemblerX86Common::load32;
    using MacroAssemblerX86Common::store32;
    using MacroAssemblerX86Common::branch32;
    using MacroAssemblerX86Common::call;
    using MacroAssemblerX86Common::loadDouble;
    using MacroAssemblerX86Common::convertInt32ToDouble;
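
    // Three-operand add: LEA computes dest = src + imm in a single instruction,
    // without modifying src or the flags.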
    void add32(Imm32 imm, RegisterID src, RegisterID dest)
    {
        m_assembler.leal_mr(imm.m_value, src, dest);
    }
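
    // The following forms operate directly on absolute addresses; a full 32-bit
    // address fits in an x86 instruction's displacement field, so no scratch
    // register is needed.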
    void add32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.addl_im(imm.m_value, address.m_ptr);
    }

    void addWithCarry32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.adcl_im(imm.m_value, address.m_ptr);
    }

    void and32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.andl_im(imm.m_value, address.m_ptr);
    }

    void or32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.orl_im(imm.m_value, address.m_ptr);
    }

    void sub32(Imm32 imm, AbsoluteAddress address)
    {
        m_assembler.subl_im(imm.m_value, address.m_ptr);
    }

    void load32(void* address, RegisterID dest)
    {
        m_assembler.movl_mr(address, dest);
    }

    void loadDouble(const void* address, FPRegisterID dest)
    {
        ASSERT(isSSE2Present());
        m_assembler.movsd_mr(address, dest);
    }

    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
    {
        m_assembler.cvtsi2sd_mr(src.m_ptr, dest);
    }

    void store32(Imm32 imm, void* address)
    {
        m_assembler.movl_i32m(imm.m_value, address);
    }

    void store32(RegisterID src, void* address)
    {
        m_assembler.movl_rm(src, address);
    }

    Jump branch32(Condition cond, AbsoluteAddress left, RegisterID right)
    {
        m_assembler.cmpl_rm(right, left.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branch32(Condition cond, AbsoluteAddress left, Imm32 right)
    {
        m_assembler.cmpl_im(right.m_value, left.m_ptr);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Call call()
    {
        return Call(m_assembler.call(), Call::Linkable);
    }

    Call tailRecursiveCall()
    {
        return Call::fromTailJump(jump());
    }

    Call makeTailRecursiveCall(Jump oldJump)
    {
        return Call::fromTailJump(oldJump);
    }
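
    // The *WithPatch helpers emit pointer-sized immediates that LinkBuffer and
    // RepatchBuffer can overwrite later; the _force32 assembler forms guarantee a
    // full 32-bit immediate encoding even when the initial value would fit in a
    // shorter one.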
    DataLabelPtr moveWithPatch(ImmPtr initialValue, RegisterID dest)
    {
        m_assembler.movl_i32r(initialValue.asIntptr(), dest);
        return DataLabelPtr(this);
    }

    Jump branchPtrWithPatch(Condition cond, RegisterID left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
    {
        m_assembler.cmpl_ir_force32(initialRightValue.asIntptr(), left);
        dataLabel = DataLabelPtr(this);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    Jump branchPtrWithPatch(Condition cond, Address left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
    {
        m_assembler.cmpl_im_force32(initialRightValue.asIntptr(), left.offset, left.base);
        dataLabel = DataLabelPtr(this);
        return Jump(m_assembler.jCC(x86Condition(cond)));
    }

    DataLabelPtr storePtrWithPatch(ImmPtr initialValue, ImplicitAddress address)
    {
        m_assembler.movl_i32m(initialValue.asIntptr(), address.offset, address.base);
        return DataLabelPtr(this);
    }
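
    // The returned Label marks the load so the instruction can be located and
    // repatched later; on x86 a pointer load is a plain 32-bit load.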
    Label loadPtrWithPatchToLEA(Address address, RegisterID dest)
    {
        Label label(this);
        load32(address, dest);
        return label;
    }

    bool supportsFloatingPoint() const { return m_isSSE2Present; }
    // See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
    bool supportsFloatingPointTruncate() const { return m_isSSE2Present; }
    bool supportsFloatingPointSqrt() const { return m_isSSE2Present; }

private:
    const bool m_isSSE2Present;

    friend class LinkBuffer;
    friend class RepatchBuffer;
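
    // LinkBuffer and RepatchBuffer use these hooks to bind call sites once the
    // generated code has been copied to its final location, and to retarget them
    // afterwards.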
    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        X86Assembler::linkCall(code, call.m_jmp, function.value());
    }

    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }

    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        X86Assembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }
};

} // namespace JSC

#endif // ENABLE(ASSEMBLER)

#endif // MacroAssemblerX86_h