/*
 * Copyright (C) 2008 Apple Inc.
 * Copyright (C) 2009, 2010 University of Szeged
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef MacroAssemblerARM_h
#define MacroAssemblerARM_h

#if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)

#include "ARMAssembler.h"
#include "AbstractMacroAssembler.h"

namespace JSC {

class MacroAssemblerARM : public AbstractMacroAssembler<ARMAssembler> {
    static const int DoubleConditionMask = 0x0f;
    static const int DoubleConditionBitSpecial = 0x10;
    COMPILE_ASSERT(!(DoubleConditionBitSpecial & DoubleConditionMask), DoubleConditionBitSpecial_should_not_interfere_with_ARMAssembler_Condition_codes);
public:
    typedef ARMRegisters::FPRegisterID FPRegisterID;

    enum RelationalCondition {
        Equal = ARMAssembler::EQ,
        NotEqual = ARMAssembler::NE,
        Above = ARMAssembler::HI,
        AboveOrEqual = ARMAssembler::CS,
        Below = ARMAssembler::CC,
        BelowOrEqual = ARMAssembler::LS,
        GreaterThan = ARMAssembler::GT,
        GreaterThanOrEqual = ARMAssembler::GE,
        LessThan = ARMAssembler::LT,
        LessThanOrEqual = ARMAssembler::LE
    };

    enum ResultCondition {
        Overflow = ARMAssembler::VS,
        Signed = ARMAssembler::MI,
        PositiveOrZero = ARMAssembler::PL,
        Zero = ARMAssembler::EQ,
        NonZero = ARMAssembler::NE
    };

    enum DoubleCondition {
        // These conditions will only evaluate to true if the comparison is ordered - i.e. neither operand is NaN.
        DoubleEqual = ARMAssembler::EQ,
        DoubleNotEqual = ARMAssembler::NE | DoubleConditionBitSpecial,
        DoubleGreaterThan = ARMAssembler::GT,
        DoubleGreaterThanOrEqual = ARMAssembler::GE,
        DoubleLessThan = ARMAssembler::CC,
        DoubleLessThanOrEqual = ARMAssembler::LS,
        // If either operand is NaN, these conditions always evaluate to true.
        DoubleEqualOrUnordered = ARMAssembler::EQ | DoubleConditionBitSpecial,
        DoubleNotEqualOrUnordered = ARMAssembler::NE,
        DoubleGreaterThanOrUnordered = ARMAssembler::HI,
        DoubleGreaterThanOrEqualOrUnordered = ARMAssembler::CS,
        DoubleLessThanOrUnordered = ARMAssembler::LT,
        DoubleLessThanOrEqualOrUnordered = ARMAssembler::LE,
    };
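
    // The DoubleConditionBitSpecial flag is consumed by branchDouble() below: after
    // VCMP/VMRS, conditions carrying the flag emit a conditional "cmp S0, S0" under VS,
    // which forces the Z flag to 1 whenever the comparison was unordered (NaN). This is
    // how DoubleEqualOrUnordered accepts NaN while DoubleNotEqual rejects it.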

    static const RegisterID stackPointerRegister = ARMRegisters::sp;
    static const RegisterID linkRegister = ARMRegisters::lr;

    static const Scale ScalePtr = TimesFour;

    void add32(RegisterID src, RegisterID dest)
    {
        m_assembler.adds(dest, dest, src);
    }

    void add32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.adds(dest, op1, op2);
    }

    void add32(TrustedImm32 imm, Address address)
    {
        load32(address, ARMRegisters::S1);
        add32(imm, ARMRegisters::S1);
        store32(ARMRegisters::S1, address);
    }

    void add32(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.adds(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }
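
    // Note on the pattern above: getImm() returns an Op2-encodable form of the immediate
    // when one exists, and otherwise materializes the value into the supplied scratch
    // register (S0 and S1 act as assembler scratch registers throughout this file).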

    void add32(AbsoluteAddress src, RegisterID dest)
    {
        move(TrustedImmPtr(src.m_ptr), ARMRegisters::S1);
        m_assembler.dtrUp(ARMAssembler::LoadUint32, ARMRegisters::S1, ARMRegisters::S1, 0);
        add32(ARMRegisters::S1, dest);
    }

    void add32(Address src, RegisterID dest)
    {
        load32(src, ARMRegisters::S1);
        add32(ARMRegisters::S1, dest);
    }

    void add32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.adds(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void and32(RegisterID src, RegisterID dest)
    {
        m_assembler.bitAnds(dest, dest, src);
    }

    void and32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.bitAnds(dest, op1, op2);
    }

    void and32(TrustedImm32 imm, RegisterID dest)
    {
        ARMWord w = m_assembler.getImm(imm.m_value, ARMRegisters::S0, true);
        if (w & ARMAssembler::Op2InvertedImmediate)
            m_assembler.bics(dest, dest, w & ~ARMAssembler::Op2InvertedImmediate);
        else
            m_assembler.bitAnds(dest, dest, w);
    }
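
    // When the bitwise complement of an immediate is Op2-encodable, getImm() (called with
    // inversion permitted) tags its result with Op2InvertedImmediate, and the AND is then
    // emitted as a BIC (and-not) of the complemented constant instead of loading the
    // original value into a scratch register first.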

    void and32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        ARMWord w = m_assembler.getImm(imm.m_value, ARMRegisters::S0, true);
        if (w & ARMAssembler::Op2InvertedImmediate)
            m_assembler.bics(dest, src, w & ~ARMAssembler::Op2InvertedImmediate);
        else
            m_assembler.bitAnds(dest, src, w);
    }

    void and32(Address src, RegisterID dest)
    {
        load32(src, ARMRegisters::S1);
        and32(ARMRegisters::S1, dest);
    }

    void lshift32(RegisterID shiftAmount, RegisterID dest)
    {
        lshift32(dest, shiftAmount, dest);
    }

    void lshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
    {
        ARMWord w = ARMAssembler::getOp2Byte(0x1f);
        m_assembler.bitAnd(ARMRegisters::S0, shiftAmount, w);

        m_assembler.movs(dest, m_assembler.lslRegister(src, ARMRegisters::S0));
    }

    void lshift32(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.movs(dest, m_assembler.lsl(dest, imm.m_value & 0x1f));
    }

    void lshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.movs(dest, m_assembler.lsl(src, imm.m_value & 0x1f));
    }
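
    // Shift amounts are masked to the range 0..31 in both the immediate and the dynamic
    // forms (here and in rshift32/urshift32 below), so shifts behave modulo 32 rather
    // than using ARM's native byte-range register-shift semantics.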

    void mul32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        if (op2 == dest) {
            if (op1 == dest) {
                move(op2, ARMRegisters::S0);
                op2 = ARMRegisters::S0;
            } else {
                // Swap the operands.
                RegisterID tmp = op1;
                op1 = op2;
                op2 = tmp;
            }
        }

        m_assembler.muls(dest, op1, op2);
    }

    void mul32(RegisterID src, RegisterID dest)
    {
        mul32(src, dest, dest);
    }
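
    // The operand shuffle in mul32() exists because ARM's MUL forbids the destination
    // and the multiplicand (Rd and Rm) being the same register on pre-ARMv6 cores.
    // Multiplication is commutative, so swapping operands is free; only when both
    // operands alias dest must one be staged through the scratch register S0.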

    void mul32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        move(imm, ARMRegisters::S0);
        m_assembler.muls(dest, src, ARMRegisters::S0);
    }

    void neg32(RegisterID srcDest)
    {
        m_assembler.rsbs(srcDest, srcDest, ARMAssembler::getOp2Byte(0));
    }

    void or32(RegisterID src, RegisterID dest)
    {
        m_assembler.orrs(dest, dest, src);
    }

    void or32(RegisterID src, AbsoluteAddress dest)
    {
        move(TrustedImmPtr(dest.m_ptr), ARMRegisters::S0);
        load32(Address(ARMRegisters::S0), ARMRegisters::S1);
        or32(src, ARMRegisters::S1);
        store32(ARMRegisters::S1, ARMRegisters::S0);
    }

    void or32(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.orrs(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void or32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        m_assembler.orrs(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void or32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.orrs(dest, op1, op2);
    }

    void rshift32(RegisterID shiftAmount, RegisterID dest)
    {
        rshift32(dest, shiftAmount, dest);
    }

    void rshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
    {
        ARMWord w = ARMAssembler::getOp2Byte(0x1f);
        m_assembler.bitAnd(ARMRegisters::S0, shiftAmount, w);

        m_assembler.movs(dest, m_assembler.asrRegister(src, ARMRegisters::S0));
    }

    void rshift32(TrustedImm32 imm, RegisterID dest)
    {
        rshift32(dest, imm, dest);
    }

    void rshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.movs(dest, m_assembler.asr(src, imm.m_value & 0x1f));
    }

    void urshift32(RegisterID shiftAmount, RegisterID dest)
    {
        urshift32(dest, shiftAmount, dest);
    }

    void urshift32(RegisterID src, RegisterID shiftAmount, RegisterID dest)
    {
        ARMWord w = ARMAssembler::getOp2Byte(0x1f);
        m_assembler.bitAnd(ARMRegisters::S0, shiftAmount, w);

        m_assembler.movs(dest, m_assembler.lsrRegister(src, ARMRegisters::S0));
    }

    void urshift32(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.movs(dest, m_assembler.lsr(dest, imm.m_value & 0x1f));
    }

    void urshift32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.movs(dest, m_assembler.lsr(src, imm.m_value & 0x1f));
    }

    void sub32(RegisterID src, RegisterID dest)
    {
        m_assembler.subs(dest, dest, src);
    }

    void sub32(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.subs(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void sub32(TrustedImm32 imm, Address address)
    {
        load32(address, ARMRegisters::S1);
        sub32(imm, ARMRegisters::S1);
        store32(ARMRegisters::S1, address);
    }

    void sub32(Address src, RegisterID dest)
    {
        load32(src, ARMRegisters::S1);
        sub32(ARMRegisters::S1, dest);
    }

    void sub32(RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.subs(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void xor32(RegisterID src, RegisterID dest)
    {
        m_assembler.eors(dest, dest, src);
    }

    void xor32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        m_assembler.eors(dest, op1, op2);
    }

    void xor32(TrustedImm32 imm, RegisterID dest)
    {
        if (imm.m_value == -1)
            m_assembler.mvns(dest, dest);
        else
            m_assembler.eors(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void xor32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        if (imm.m_value == -1)
            m_assembler.mvns(dest, src);
        else
            m_assembler.eors(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }
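
    // XOR with -1 is bitwise NOT, so that case is emitted as a single MVN rather than an
    // EOR against a materialized all-ones constant.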

    void countLeadingZeros32(RegisterID src, RegisterID dest)
    {
#if WTF_ARM_ARCH_AT_LEAST(5)
        m_assembler.clz(dest, src);
#else
        UNUSED_PARAM(src);
        UNUSED_PARAM(dest);
        RELEASE_ASSERT_NOT_REACHED();
#endif
    }

    void load8(ImplicitAddress address, RegisterID dest)
    {
        m_assembler.dataTransfer32(ARMAssembler::LoadUint8, dest, address.base, address.offset);
    }

    void load8(BaseIndex address, RegisterID dest)
    {
        m_assembler.baseIndexTransfer32(ARMAssembler::LoadUint8, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void load8Signed(BaseIndex address, RegisterID dest)
    {
        m_assembler.baseIndexTransfer16(ARMAssembler::LoadInt8, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void load16(ImplicitAddress address, RegisterID dest)
    {
        m_assembler.dataTransfer16(ARMAssembler::LoadUint16, dest, address.base, address.offset);
    }

    void load16(BaseIndex address, RegisterID dest)
    {
        m_assembler.baseIndexTransfer16(ARMAssembler::LoadUint16, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void load16Signed(BaseIndex address, RegisterID dest)
    {
        m_assembler.baseIndexTransfer16(ARMAssembler::LoadInt16, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void load32(ImplicitAddress address, RegisterID dest)
    {
        m_assembler.dataTransfer32(ARMAssembler::LoadUint32, dest, address.base, address.offset);
    }

    void load32(BaseIndex address, RegisterID dest)
    {
        m_assembler.baseIndexTransfer32(ARMAssembler::LoadUint32, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

#if CPU(ARMV5_OR_LOWER)
    void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest);
#else
    void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
    {
        load32(address, dest);
    }
#endif

    void load16Unaligned(BaseIndex address, RegisterID dest)
    {
        load16(address, dest);
    }

    ConvertibleLoadLabel convertibleLoadPtr(Address address, RegisterID dest)
    {
        ConvertibleLoadLabel result(this);
        ASSERT(address.offset >= 0 && address.offset <= 255);
        m_assembler.dtrUp(ARMAssembler::LoadUint32, dest, address.base, address.offset);
        return result;
    }

    DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
    {
        DataLabel32 dataLabel(this);
        m_assembler.ldrUniqueImmediate(ARMRegisters::S0, 0);
        m_assembler.dtrUpRegister(ARMAssembler::LoadUint32, dest, address.base, ARMRegisters::S0);
        return dataLabel;
    }

    static bool isCompactPtrAlignedAddressOffset(ptrdiff_t value)
    {
        return value >= -4095 && value <= 4095;
    }

    DataLabelCompact load32WithCompactAddressOffsetPatch(Address address, RegisterID dest)
    {
        DataLabelCompact dataLabel(this);
        ASSERT(isCompactPtrAlignedAddressOffset(address.offset));
        if (address.offset >= 0)
            m_assembler.dtrUp(ARMAssembler::LoadUint32, dest, address.base, address.offset);
        else
            m_assembler.dtrDown(ARMAssembler::LoadUint32, dest, address.base, address.offset);
        return dataLabel;
    }

    DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
    {
        DataLabel32 dataLabel(this);
        m_assembler.ldrUniqueImmediate(ARMRegisters::S0, 0);
        m_assembler.dtrUpRegister(ARMAssembler::StoreUint32, src, address.base, ARMRegisters::S0);
        return dataLabel;
    }
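
    // In the two WithAddressOffsetPatch functions above, ldrUniqueImmediate() loads a
    // placeholder offset into S0 from a dedicated (non-shared) constant-pool slot; the
    // DataLabel32 records its location so the real offset can be patched in after code
    // generation.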

    void store8(RegisterID src, BaseIndex address)
    {
        m_assembler.baseIndexTransfer32(ARMAssembler::StoreUint8, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void store8(TrustedImm32 imm, const void* address)
    {
        move(TrustedImm32(reinterpret_cast<ARMWord>(address)), ARMRegisters::S0);
        move(imm, ARMRegisters::S1);
        m_assembler.dtrUp(ARMAssembler::StoreUint8, ARMRegisters::S1, ARMRegisters::S0, 0);
    }

    void store16(RegisterID src, BaseIndex address)
    {
        m_assembler.baseIndexTransfer16(ARMAssembler::StoreUint16, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void store32(RegisterID src, ImplicitAddress address)
    {
        m_assembler.dataTransfer32(ARMAssembler::StoreUint32, src, address.base, address.offset);
    }

    void store32(RegisterID src, BaseIndex address)
    {
        m_assembler.baseIndexTransfer32(ARMAssembler::StoreUint32, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void store32(TrustedImm32 imm, ImplicitAddress address)
    {
        move(imm, ARMRegisters::S1);
        store32(ARMRegisters::S1, address);
    }

    void store32(TrustedImm32 imm, BaseIndex address)
    {
        move(imm, ARMRegisters::S1);
        m_assembler.baseIndexTransfer32(ARMAssembler::StoreUint32, ARMRegisters::S1, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void store32(RegisterID src, const void* address)
    {
        m_assembler.ldrUniqueImmediate(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
        m_assembler.dtrUp(ARMAssembler::StoreUint32, src, ARMRegisters::S0, 0);
    }

    void store32(TrustedImm32 imm, const void* address)
    {
        m_assembler.ldrUniqueImmediate(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
        m_assembler.moveImm(imm.m_value, ARMRegisters::S1);
        m_assembler.dtrUp(ARMAssembler::StoreUint32, ARMRegisters::S1, ARMRegisters::S0, 0);
    }

    void pop(RegisterID dest)
    {
        m_assembler.pop(dest);
    }

    void push(RegisterID src)
    {
        m_assembler.push(src);
    }

    void push(Address address)
    {
        load32(address, ARMRegisters::S1);
        push(ARMRegisters::S1);
    }

    void push(TrustedImm32 imm)
    {
        move(imm, ARMRegisters::S0);
        push(ARMRegisters::S0);
    }

    void move(TrustedImm32 imm, RegisterID dest)
    {
        m_assembler.moveImm(imm.m_value, dest);
    }

    void move(RegisterID src, RegisterID dest)
    {
        if (src != dest)
            m_assembler.mov(dest, src);
    }

    void move(TrustedImmPtr imm, RegisterID dest)
    {
        move(TrustedImm32(imm), dest);
    }

    void swap(RegisterID reg1, RegisterID reg2)
    {
        move(reg1, ARMRegisters::S0);
        move(reg2, reg1);
        move(ARMRegisters::S0, reg2);
    }

    void signExtend32ToPtr(RegisterID src, RegisterID dest)
    {
        if (src != dest)
            move(src, dest);
    }

    void zeroExtend32ToPtr(RegisterID src, RegisterID dest)
    {
        if (src != dest)
            move(src, dest);
    }

    Jump branch8(RelationalCondition cond, Address left, TrustedImm32 right)
    {
        load8(left, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    Jump branch8(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
    {
        ASSERT(!(right.m_value & 0xFFFFFF00));
        load8(left, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    Jump branch32(RelationalCondition cond, RegisterID left, RegisterID right, int useConstantPool = 0)
    {
        m_assembler.cmp(left, right);
        return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
    }

    Jump branch32(RelationalCondition cond, RegisterID left, TrustedImm32 right, int useConstantPool = 0)
    {
        internalCompare32(left, right);
        return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
    }

    Jump branch32(RelationalCondition cond, RegisterID left, Address right)
    {
        load32(right, ARMRegisters::S1);
        return branch32(cond, left, ARMRegisters::S1);
    }

    Jump branch32(RelationalCondition cond, Address left, RegisterID right)
    {
        load32(left, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    Jump branch32(RelationalCondition cond, Address left, TrustedImm32 right)
    {
        load32(left, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    Jump branch32(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
    {
        load32(left, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    Jump branch32WithUnalignedHalfWords(RelationalCondition cond, BaseIndex left, TrustedImm32 right)
    {
        load32WithUnalignedHalfWords(left, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }
branchTest8(ResultCondition cond
, Address address
, TrustedImm32 mask
= TrustedImm32(-1))
616 load8(address
, ARMRegisters::S1
);
617 return branchTest32(cond
, ARMRegisters::S1
, mask
);
620 Jump
branchTest8(ResultCondition cond
, AbsoluteAddress address
, TrustedImm32 mask
= TrustedImm32(-1))
622 move(TrustedImmPtr(address
.m_ptr
), ARMRegisters::S1
);
623 load8(Address(ARMRegisters::S1
), ARMRegisters::S1
);
624 return branchTest32(cond
, ARMRegisters::S1
, mask
);
627 Jump
branchTest32(ResultCondition cond
, RegisterID reg
, RegisterID mask
)
629 ASSERT((cond
== Zero
) || (cond
== NonZero
));
630 m_assembler
.tst(reg
, mask
);
631 return Jump(m_assembler
.jmp(ARMCondition(cond
)));
634 Jump
branchTest32(ResultCondition cond
, RegisterID reg
, TrustedImm32 mask
= TrustedImm32(-1))
636 ASSERT((cond
== Zero
) || (cond
== NonZero
));
637 ARMWord w
= m_assembler
.getImm(mask
.m_value
, ARMRegisters::S0
, true);
638 if (w
& ARMAssembler::Op2InvertedImmediate
)
639 m_assembler
.bics(ARMRegisters::S0
, reg
, w
& ~ARMAssembler::Op2InvertedImmediate
);
641 m_assembler
.tst(reg
, w
);
642 return Jump(m_assembler
.jmp(ARMCondition(cond
)));
645 Jump
branchTest32(ResultCondition cond
, Address address
, TrustedImm32 mask
= TrustedImm32(-1))
647 load32(address
, ARMRegisters::S1
);
648 return branchTest32(cond
, ARMRegisters::S1
, mask
);
651 Jump
branchTest32(ResultCondition cond
, BaseIndex address
, TrustedImm32 mask
= TrustedImm32(-1))
653 load32(address
, ARMRegisters::S1
);
654 return branchTest32(cond
, ARMRegisters::S1
, mask
);

    Jump jump()
    {
        return Jump(m_assembler.jmp());
    }

    void jump(RegisterID target)
    {
        m_assembler.bx(target);
    }

    void jump(Address address)
    {
        load32(address, ARMRegisters::pc);
    }

    void jump(AbsoluteAddress address)
    {
        move(TrustedImmPtr(address.m_ptr), ARMRegisters::S0);
        load32(Address(ARMRegisters::S0, 0), ARMRegisters::pc);
    }

    void moveDoubleToInts(FPRegisterID src, RegisterID dest1, RegisterID dest2)
    {
        m_assembler.vmov(dest1, dest2, src);
    }

    void moveIntsToDouble(RegisterID src1, RegisterID src2, FPRegisterID dest, FPRegisterID)
    {
        m_assembler.vmov(dest, src1, src2);
    }

    Jump branchAdd32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero)
            || (cond == NonZero) || (cond == PositiveOrZero));
        add32(src, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchAdd32(ResultCondition cond, RegisterID op1, RegisterID op2, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero)
            || (cond == NonZero) || (cond == PositiveOrZero));
        add32(op1, op2, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero)
            || (cond == NonZero) || (cond == PositiveOrZero));
        add32(imm, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchAdd32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero)
            || (cond == NonZero) || (cond == PositiveOrZero));
        add32(src, imm, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchAdd32(ResultCondition cond, TrustedImm32 imm, AbsoluteAddress dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero)
            || (cond == NonZero) || (cond == PositiveOrZero));
        add32(imm, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }
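
    // The branchAdd32/branchSub32 family relies on the arithmetic helpers they call
    // emitting flag-setting instructions (ADDS/SUBS), so the condition tested by the
    // jump reflects the operation that was just performed.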

    void mull32(RegisterID op1, RegisterID op2, RegisterID dest)
    {
        if (op2 == dest) {
            if (op1 == dest) {
                move(op2, ARMRegisters::S0);
                op2 = ARMRegisters::S0;
            } else {
                // Swap the operands.
                RegisterID tmp = op1;
                op1 = op2;
                op2 = tmp;
            }
        }

        m_assembler.mull(ARMRegisters::S1, dest, op1, op2);
        m_assembler.cmp(ARMRegisters::S1, m_assembler.asr(dest, 31));
    }
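
    // mull32() detects 32-bit overflow: SMULL leaves the full 64-bit product in S1:dest,
    // and comparing the high word (S1) against the sign-extension of the low word
    // (dest >> 31, arithmetic) sets NE exactly when the product did not fit in 32 bits.
    // branchMul32() exploits this by rewriting an Overflow check as NonZero.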

    Jump branchMul32(ResultCondition cond, RegisterID src1, RegisterID src2, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        if (cond == Overflow) {
            mull32(src1, src2, dest);
            cond = NonZero;
        }
        else
            mul32(src1, src2, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchMul32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        return branchMul32(cond, src, dest, dest);
    }

    Jump branchMul32(ResultCondition cond, TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        if (cond == Overflow) {
            move(imm, ARMRegisters::S0);
            mull32(ARMRegisters::S0, src, dest);
            cond = NonZero;
        }
        else
            mul32(imm, src, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchSub32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        sub32(src, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchSub32(ResultCondition cond, TrustedImm32 imm, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        sub32(imm, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchSub32(ResultCondition cond, RegisterID src, TrustedImm32 imm, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        sub32(src, imm, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchSub32(ResultCondition cond, RegisterID op1, RegisterID op2, RegisterID dest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        m_assembler.subs(dest, op1, op2);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchNeg32(ResultCondition cond, RegisterID srcDest)
    {
        ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
        neg32(srcDest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    Jump branchOr32(ResultCondition cond, RegisterID src, RegisterID dest)
    {
        ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
        or32(src, dest);
        return Jump(m_assembler.jmp(ARMCondition(cond)));
    }

    PatchableJump patchableBranch32(RelationalCondition cond, RegisterID reg, TrustedImm32 imm)
    {
        internalCompare32(reg, imm);
        Jump jump(m_assembler.loadBranchTarget(ARMRegisters::S1, ARMCondition(cond), true));
        m_assembler.bx(ARMRegisters::S1, ARMCondition(cond));
        return PatchableJump(jump);
    }

    Call nearCall()
    {
        ensureSpace(2 * sizeof(ARMWord), sizeof(ARMWord));
        m_assembler.loadBranchTarget(ARMRegisters::S1, ARMAssembler::AL, true);
        return Call(m_assembler.blx(ARMRegisters::S1), Call::LinkableNear);
    }

    Call call(RegisterID target)
    {
        return Call(m_assembler.blx(target), Call::None);
    }

    void call(Address address)
    {
        call32(address.base, address.offset);
    }

    void ret()
    {
        m_assembler.bx(linkRegister);
    }

    void compare32(RelationalCondition cond, RegisterID left, RegisterID right, RegisterID dest)
    {
        m_assembler.cmp(left, right);
        m_assembler.mov(dest, ARMAssembler::getOp2Byte(0));
        m_assembler.mov(dest, ARMAssembler::getOp2Byte(1), ARMCondition(cond));
    }

    void compare32(RelationalCondition cond, RegisterID left, TrustedImm32 right, RegisterID dest)
    {
        m_assembler.cmp(left, m_assembler.getImm(right.m_value, ARMRegisters::S0));
        m_assembler.mov(dest, ARMAssembler::getOp2Byte(0));
        m_assembler.mov(dest, ARMAssembler::getOp2Byte(1), ARMCondition(cond));
    }

    void compare8(RelationalCondition cond, Address left, TrustedImm32 right, RegisterID dest)
    {
        load8(left, ARMRegisters::S1);
        compare32(cond, ARMRegisters::S1, right, dest);
    }
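
    // compare32() materializes a boolean without branching: dest is unconditionally set
    // to 0, then a conditional MOV rewrites it to 1 only when the tested condition holds.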

    void test32(ResultCondition cond, RegisterID reg, TrustedImm32 mask, RegisterID dest)
    {
        if (mask.m_value == -1)
            m_assembler.cmp(0, reg);
        else
            m_assembler.tst(reg, m_assembler.getImm(mask.m_value, ARMRegisters::S0));
        m_assembler.mov(dest, ARMAssembler::getOp2Byte(0));
        m_assembler.mov(dest, ARMAssembler::getOp2Byte(1), ARMCondition(cond));
    }

    void test32(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
    {
        load32(address, ARMRegisters::S1);
        test32(cond, ARMRegisters::S1, mask, dest);
    }

    void test8(ResultCondition cond, Address address, TrustedImm32 mask, RegisterID dest)
    {
        load8(address, ARMRegisters::S1);
        test32(cond, ARMRegisters::S1, mask, dest);
    }

    void add32(TrustedImm32 imm, RegisterID src, RegisterID dest)
    {
        m_assembler.add(dest, src, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
    }

    void add32(TrustedImm32 imm, AbsoluteAddress address)
    {
        load32(address.m_ptr, ARMRegisters::S1);
        add32(imm, ARMRegisters::S1);
        store32(ARMRegisters::S1, address.m_ptr);
    }

    void add64(TrustedImm32 imm, AbsoluteAddress address)
    {
        ARMWord tmp;

        move(TrustedImmPtr(address.m_ptr), ARMRegisters::S1);
        m_assembler.dtrUp(ARMAssembler::LoadUint32, ARMRegisters::S0, ARMRegisters::S1, 0);

        if ((tmp = ARMAssembler::getOp2(imm.m_value)) != ARMAssembler::InvalidImmediate)
            m_assembler.adds(ARMRegisters::S0, ARMRegisters::S0, tmp);
        else if ((tmp = ARMAssembler::getOp2(-imm.m_value)) != ARMAssembler::InvalidImmediate)
            m_assembler.subs(ARMRegisters::S0, ARMRegisters::S0, tmp);
        else {
            m_assembler.adds(ARMRegisters::S0, ARMRegisters::S0, m_assembler.getImm(imm.m_value, ARMRegisters::S1));
            move(TrustedImmPtr(address.m_ptr), ARMRegisters::S1);
        }
        m_assembler.dtrUp(ARMAssembler::StoreUint32, ARMRegisters::S0, ARMRegisters::S1, 0);

        m_assembler.dtrUp(ARMAssembler::LoadUint32, ARMRegisters::S0, ARMRegisters::S1, sizeof(ARMWord));
        if (imm.m_value >= 0)
            m_assembler.adc(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        else
            m_assembler.sbc(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        m_assembler.dtrUp(ARMAssembler::StoreUint32, ARMRegisters::S0, ARMRegisters::S1, sizeof(ARMWord));
    }
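
    // add64() performs a 64-bit memory add in two 32-bit halves: ADDS/SUBS on the low
    // word set the carry flag, which ADC/SBC with a zero operand then folds into the
    // high word; the sign of the 32-bit immediate selects the carry or borrow form.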

    void sub32(TrustedImm32 imm, AbsoluteAddress address)
    {
        load32(address.m_ptr, ARMRegisters::S1);
        sub32(imm, ARMRegisters::S1);
        store32(ARMRegisters::S1, address.m_ptr);
    }

    void load32(const void* address, RegisterID dest)
    {
        m_assembler.ldrUniqueImmediate(ARMRegisters::S0, reinterpret_cast<ARMWord>(address));
        m_assembler.dtrUp(ARMAssembler::LoadUint32, dest, ARMRegisters::S0, 0);
    }

    Jump branch32(RelationalCondition cond, AbsoluteAddress left, RegisterID right)
    {
        load32(left.m_ptr, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    Jump branch32(RelationalCondition cond, AbsoluteAddress left, TrustedImm32 right)
    {
        load32(left.m_ptr, ARMRegisters::S1);
        return branch32(cond, ARMRegisters::S1, right);
    }

    void relativeTableJump(RegisterID index, int scale)
    {
        ASSERT(scale >= 0 && scale <= 31);
        m_assembler.add(ARMRegisters::pc, ARMRegisters::pc, m_assembler.lsl(index, scale));

        // NOP the default prefetching
        m_assembler.mov(ARMRegisters::r0, ARMRegisters::r0);
    }
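
    // Reading the PC in ARM state yields the current instruction's address plus 8, so
    // the ADD above effectively lands two slots ahead; the MOV r0, r0 fills the skipped
    // slot so that table entry 0 starts at a well-defined position.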

    Call call()
    {
        ensureSpace(2 * sizeof(ARMWord), sizeof(ARMWord));
        m_assembler.loadBranchTarget(ARMRegisters::S1, ARMAssembler::AL, true);
        return Call(m_assembler.blx(ARMRegisters::S1), Call::Linkable);
    }
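
    // ensureSpace() above reserves room for the branch-target load and the BLX as an
    // uninterrupted pair, plus a constant-pool slot for the target address, so that a
    // constant-pool flush cannot be emitted between them and break later relinking.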

    Call tailRecursiveCall()
    {
        return Call::fromTailJump(jump());
    }

    Call makeTailRecursiveCall(Jump oldJump)
    {
        return Call::fromTailJump(oldJump);
    }

    DataLabelPtr moveWithPatch(TrustedImmPtr initialValue, RegisterID dest)
    {
        DataLabelPtr dataLabel(this);
        m_assembler.ldrUniqueImmediate(dest, reinterpret_cast<ARMWord>(initialValue.m_value));
        return dataLabel;
    }

    Jump branchPtrWithPatch(RelationalCondition cond, RegisterID left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        ensureSpace(3 * sizeof(ARMWord), 2 * sizeof(ARMWord));
        dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S1);
        Jump jump = branch32(cond, left, ARMRegisters::S1, true);
        return jump;
    }

    Jump branchPtrWithPatch(RelationalCondition cond, Address left, DataLabelPtr& dataLabel, TrustedImmPtr initialRightValue = TrustedImmPtr(0))
    {
        load32(left, ARMRegisters::S1);
        ensureSpace(3 * sizeof(ARMWord), 2 * sizeof(ARMWord));
        dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S0);
        Jump jump = branch32(cond, ARMRegisters::S0, ARMRegisters::S1, true);
        return jump;
    }

    DataLabelPtr storePtrWithPatch(TrustedImmPtr initialValue, ImplicitAddress address)
    {
        DataLabelPtr dataLabel = moveWithPatch(initialValue, ARMRegisters::S1);
        store32(ARMRegisters::S1, address);
        return dataLabel;
    }

    DataLabelPtr storePtrWithPatch(ImplicitAddress address)
    {
        return storePtrWithPatch(TrustedImmPtr(0), address);
    }

    // Floating point operators
    static bool supportsFloatingPoint()
    {
        return s_isVFPPresent;
    }

    static bool supportsFloatingPointTruncate()
    {
        return false;
    }

    static bool supportsFloatingPointSqrt()
    {
        return s_isVFPPresent;
    }

    static bool supportsFloatingPointAbs() { return false; }

    void loadFloat(BaseIndex address, FPRegisterID dest)
    {
        m_assembler.baseIndexTransferFloat(ARMAssembler::LoadFloat, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void loadDouble(ImplicitAddress address, FPRegisterID dest)
    {
        m_assembler.dataTransferFloat(ARMAssembler::LoadDouble, dest, address.base, address.offset);
    }

    void loadDouble(BaseIndex address, FPRegisterID dest)
    {
        m_assembler.baseIndexTransferFloat(ARMAssembler::LoadDouble, dest, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void loadDouble(const void* address, FPRegisterID dest)
    {
        move(TrustedImm32(reinterpret_cast<ARMWord>(address)), ARMRegisters::S0);
        m_assembler.doubleDtrUp(ARMAssembler::LoadDouble, dest, ARMRegisters::S0, 0);
    }

    void storeFloat(FPRegisterID src, BaseIndex address)
    {
        m_assembler.baseIndexTransferFloat(ARMAssembler::StoreFloat, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void storeDouble(FPRegisterID src, ImplicitAddress address)
    {
        m_assembler.dataTransferFloat(ARMAssembler::StoreDouble, src, address.base, address.offset);
    }

    void storeDouble(FPRegisterID src, BaseIndex address)
    {
        m_assembler.baseIndexTransferFloat(ARMAssembler::StoreDouble, src, address.base, address.index, static_cast<int>(address.scale), address.offset);
    }

    void storeDouble(FPRegisterID src, const void* address)
    {
        move(TrustedImm32(reinterpret_cast<ARMWord>(address)), ARMRegisters::S0);
        m_assembler.dataTransferFloat(ARMAssembler::StoreDouble, src, ARMRegisters::S0, 0);
    }

    void moveDouble(FPRegisterID src, FPRegisterID dest)
    {
        if (src != dest)
            m_assembler.vmov_f64(dest, src);
    }

    void addDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vadd_f64(dest, dest, src);
    }

    void addDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vadd_f64(dest, op1, op2);
    }

    void addDouble(Address src, FPRegisterID dest)
    {
        loadDouble(src, ARMRegisters::SD0);
        addDouble(ARMRegisters::SD0, dest);
    }

    void addDouble(AbsoluteAddress address, FPRegisterID dest)
    {
        loadDouble(address.m_ptr, ARMRegisters::SD0);
        addDouble(ARMRegisters::SD0, dest);
    }

    void divDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vdiv_f64(dest, dest, src);
    }

    void divDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vdiv_f64(dest, op1, op2);
    }

    void divDouble(Address src, FPRegisterID dest)
    {
        RELEASE_ASSERT_NOT_REACHED(); // Untested
        loadDouble(src, ARMRegisters::SD0);
        divDouble(ARMRegisters::SD0, dest);
    }

    void subDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vsub_f64(dest, dest, src);
    }

    void subDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vsub_f64(dest, op1, op2);
    }

    void subDouble(Address src, FPRegisterID dest)
    {
        loadDouble(src, ARMRegisters::SD0);
        subDouble(ARMRegisters::SD0, dest);
    }

    void mulDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vmul_f64(dest, dest, src);
    }

    void mulDouble(Address src, FPRegisterID dest)
    {
        loadDouble(src, ARMRegisters::SD0);
        mulDouble(ARMRegisters::SD0, dest);
    }

    void mulDouble(FPRegisterID op1, FPRegisterID op2, FPRegisterID dest)
    {
        m_assembler.vmul_f64(dest, op1, op2);
    }

    void sqrtDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vsqrt_f64(dest, src);
    }

    void absDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vabs_f64(dest, src);
    }

    void negateDouble(FPRegisterID src, FPRegisterID dest)
    {
        m_assembler.vneg_f64(dest, src);
    }

    void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
    {
        m_assembler.vmov_vfp32(dest << 1, src);
        m_assembler.vcvt_f64_s32(dest, dest << 1);
    }

    void convertInt32ToDouble(Address src, FPRegisterID dest)
    {
        load32(src, ARMRegisters::S1);
        convertInt32ToDouble(ARMRegisters::S1, dest);
    }

    void convertInt32ToDouble(AbsoluteAddress src, FPRegisterID dest)
    {
        move(TrustedImmPtr(src.m_ptr), ARMRegisters::S1);
        load32(Address(ARMRegisters::S1), ARMRegisters::S1);
        convertInt32ToDouble(ARMRegisters::S1, dest);
    }

    void convertFloatToDouble(FPRegisterID src, FPRegisterID dst)
    {
        m_assembler.vcvt_f64_f32(dst, src);
    }

    void convertDoubleToFloat(FPRegisterID src, FPRegisterID dst)
    {
        m_assembler.vcvt_f32_f64(dst, src);
    }
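
    // The "dest << 1" in convertInt32ToDouble() turns a double-register index into the
    // index of its first overlapping single-precision register (VFP aliases d<n> over
    // s<2n>/s<2n+1>), because the integer bit pattern is staged in a single register
    // before the VCVT widens it to a double.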

    Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
    {
        m_assembler.vcmp_f64(left, right);
        m_assembler.vmrs_apsr();
        if (cond & DoubleConditionBitSpecial)
            m_assembler.cmp(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::VS);
        return Jump(m_assembler.jmp(static_cast<ARMAssembler::Condition>(cond & ~DoubleConditionMask)));
    }

    // Truncates 'src' to an integer, and places the result in 'dest'.
    // If the result is not representable as a 32 bit value, branch.
    // May also branch for some values that are representable in 32 bits
    // (specifically, in this case, INT_MIN).
    enum BranchTruncateType { BranchIfTruncateFailed, BranchIfTruncateSuccessful };
    Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
    {
        truncateDoubleToInt32(src, dest);

        m_assembler.add(ARMRegisters::S0, dest, ARMAssembler::getOp2Byte(1));
        m_assembler.bic(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(1));

        ARMWord w = ARMAssembler::getOp2(0x80000000);
        ASSERT(w != ARMAssembler::InvalidImmediate);
        m_assembler.cmp(ARMRegisters::S0, w);
        return Jump(m_assembler.jmp(branchType == BranchIfTruncateFailed ? ARMAssembler::EQ : ARMAssembler::NE));
    }
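
    // The ADD/BIC pair above computes (dest + 1) & ~1, which maps both VCVT saturation
    // results, 0x7fffffff and 0x80000000, to the single value 0x80000000; one compare
    // then detects whether the truncation fell out of range.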

    Jump branchTruncateDoubleToUint32(FPRegisterID src, RegisterID dest, BranchTruncateType branchType = BranchIfTruncateFailed)
    {
        truncateDoubleToUint32(src, dest);

        m_assembler.add(ARMRegisters::S0, dest, ARMAssembler::getOp2Byte(1));
        m_assembler.bic(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(1));

        m_assembler.cmp(ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        return Jump(m_assembler.jmp(branchType == BranchIfTruncateFailed ? ARMAssembler::EQ : ARMAssembler::NE));
    }

    // Result is undefined if the value is outside of the integer range.
    void truncateDoubleToInt32(FPRegisterID src, RegisterID dest)
    {
        m_assembler.vcvt_s32_f64(ARMRegisters::SD0 << 1, src);
        m_assembler.vmov_arm32(dest, ARMRegisters::SD0 << 1);
    }

    void truncateDoubleToUint32(FPRegisterID src, RegisterID dest)
    {
        m_assembler.vcvt_u32_f64(ARMRegisters::SD0 << 1, src);
        m_assembler.vmov_arm32(dest, ARMRegisters::SD0 << 1);
    }

    // Converts 'src' to an integer, and places the result in 'dest'.
    // If the result is not representable as a 32 bit value, branch.
    // May also branch for some values that are representable in 32 bits
    // (specifically, in this case, 0).
    void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID, bool negZeroCheck = true)
    {
        m_assembler.vcvt_s32_f64(ARMRegisters::SD0 << 1, src);
        m_assembler.vmov_arm32(dest, ARMRegisters::SD0 << 1);

        // Convert the integer result back to float & compare to the original value - if not equal or unordered (NaN) then jump.
        m_assembler.vcvt_f64_s32(ARMRegisters::SD0, ARMRegisters::SD0 << 1);
        failureCases.append(branchDouble(DoubleNotEqualOrUnordered, src, ARMRegisters::SD0));

        // If the result is zero, it might have been -0.0, and 0.0 compares equal to -0.0.
        if (negZeroCheck)
            failureCases.append(branchTest32(Zero, dest));
    }

    Jump branchDoubleNonZero(FPRegisterID reg, FPRegisterID scratch)
    {
        m_assembler.mov(ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        convertInt32ToDouble(ARMRegisters::S0, scratch);
        return branchDouble(DoubleNotEqual, reg, scratch);
    }

    Jump branchDoubleZeroOrNaN(FPRegisterID reg, FPRegisterID scratch)
    {
        m_assembler.mov(ARMRegisters::S0, ARMAssembler::getOp2Byte(0));
        convertInt32ToDouble(ARMRegisters::S0, scratch);
        return branchDouble(DoubleEqualOrUnordered, reg, scratch);
    }

    // Invert a relational condition, e.g. == becomes !=, < becomes >=, etc.
    static RelationalCondition invert(RelationalCondition cond)
    {
        ASSERT((static_cast<uint32_t>(cond & 0x0fffffff)) == 0 && static_cast<uint32_t>(cond) < static_cast<uint32_t>(ARMAssembler::AL));
        return static_cast<RelationalCondition>(cond ^ 0x10000000);
    }
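
    // This works because ARM condition codes live in bits 28-31 and are laid out in
    // complementary pairs (EQ/NE, CS/CC, GE/LT, ...), so flipping bit 28 always yields
    // the logically opposite condition; the ASSERT rejects AL, which has no inverse here.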

    static FunctionPtr readCallTarget(CodeLocationCall call)
    {
        return FunctionPtr(reinterpret_cast<void(*)()>(ARMAssembler::readCallTarget(call.dataLocation())));
    }

    static void replaceWithJump(CodeLocationLabel instructionStart, CodeLocationLabel destination)
    {
        ARMAssembler::replaceWithJump(instructionStart.dataLocation(), destination.dataLocation());
    }

    static ptrdiff_t maxJumpReplacementSize()
    {
        return ARMAssembler::maxJumpReplacementSize();
    }

    static bool canJumpReplacePatchableBranchPtrWithPatch() { return false; }

    static CodeLocationLabel startOfPatchableBranchPtrWithPatchOnAddress(CodeLocationDataLabelPtr)
    {
        UNREACHABLE_FOR_PLATFORM();
        return CodeLocationLabel();
    }

    static CodeLocationLabel startOfBranchPtrWithPatchOnRegister(CodeLocationDataLabelPtr label)
    {
        return label.labelAtOffset(0);
    }

    static void revertJumpReplacementToBranchPtrWithPatch(CodeLocationLabel instructionStart, RegisterID reg, void* initialValue)
    {
        ARMAssembler::revertBranchPtrWithPatch(instructionStart.dataLocation(), reg, reinterpret_cast<uintptr_t>(initialValue) & 0xffff);
    }

    static void revertJumpReplacementToPatchableBranchPtrWithPatch(CodeLocationLabel, Address, void*)
    {
        UNREACHABLE_FOR_PLATFORM();
    }

protected:
    ARMAssembler::Condition ARMCondition(RelationalCondition cond)
    {
        return static_cast<ARMAssembler::Condition>(cond);
    }

    ARMAssembler::Condition ARMCondition(ResultCondition cond)
    {
        return static_cast<ARMAssembler::Condition>(cond);
    }

    void ensureSpace(int insnSpace, int constSpace)
    {
        m_assembler.ensureSpace(insnSpace, constSpace);
    }

    int sizeOfConstantPool()
    {
        return m_assembler.sizeOfConstantPool();
    }

    void call32(RegisterID base, int32_t offset)
    {
        load32(Address(base, offset), ARMRegisters::S1);
        m_assembler.blx(ARMRegisters::S1);
    }

private:
    friend class LinkBuffer;
    friend class RepatchBuffer;

    void internalCompare32(RegisterID left, TrustedImm32 right)
    {
        ARMWord tmp = (static_cast<unsigned>(right.m_value) == 0x80000000) ? ARMAssembler::InvalidImmediate : m_assembler.getOp2(-right.m_value);
        if (tmp != ARMAssembler::InvalidImmediate)
            m_assembler.cmn(left, tmp);
        else
            m_assembler.cmp(left, m_assembler.getImm(right.m_value, ARMRegisters::S0));
    }
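
    // internalCompare32() prefers CMN (compare-negative) when the negated immediate is
    // Op2-encodable, saving the scratch-register load a plain CMP would need; 0x80000000
    // is excluded because its negation is not representable in 32 bits.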

    static void linkCall(void* code, Call call, FunctionPtr function)
    {
        ARMAssembler::linkCall(code, call.m_label, function.value());
    }

    static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
    {
        ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }

    static void repatchCall(CodeLocationCall call, FunctionPtr destination)
    {
        ARMAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
    }

    static const bool s_isVFPPresent;
};

} // namespace JSC

#endif // ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)

#endif // MacroAssemblerARM_h