/*
 * Copyright (C) 2009 University of Szeged
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"

#if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)

#include "MacroAssemblerARM.h"

#if OS(LINUX)
#include <sys/types.h>
#include <fcntl.h>
#include <unistd.h>
#include <elf.h>
#include <asm/hwcap.h>
#endif

namespace JSC {
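// Probe for a hardware floating point unit (VFP) at run time. On Linux the
// kernel exposes the ELF auxiliary vector at /proc/self/auxv as a sequence of
// Elf32_auxv_t records, each pairing a tag (a_type) with a value (a_un.a_val);
// the AT_HWCAP record holds a bitmask of CPU features, and the HWCAP_VFP bit
// in that mask indicates VFP support. The function below scans those records.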
static bool isVFPPresent()
{
#if OS(LINUX)
    int fd = open("/proc/self/auxv", O_RDONLY);
    if (fd > 0) {
        Elf32_auxv_t aux;
        while (read(fd, &aux, sizeof(Elf32_auxv_t))) {
            if (aux.a_type == AT_HWCAP) {
                close(fd);
                return aux.a_un.a_val & HWCAP_VFP;
            }
        }
        close(fd);
    }
#endif

    // Fall back to a compile-time answer when the kernel interface is not
    // available: report VFP as present if the code was built to target it.
#if (COMPILER(RVCT) && defined(__TARGET_FPU_VFP)) || (COMPILER(GCC) && defined(__VFP_FP__))
    return true;
#else
    return false;
#endif
}
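// Computed once at static initialization; the rest of MacroAssemblerARM can
// consult this flag (for example when reporting floating point support) so
// that VFP instructions are only emitted on hardware that has a VFP unit.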
const bool MacroAssemblerARM::s_isVFPPresent = isVFPPresent();
#if CPU(ARMV5_OR_LOWER)
/* On ARMv5 and below, natural alignment is required. */
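/* A word load from an address that is only halfword-aligned is therefore
 * synthesized from two halfword loads. With the little-endian layout this
 * code assumes, the 32-bit value at address p is reassembled as
 *
 *     dest = *(uint16_t*)p | (*(uint16_t*)(p + 2) << 16);
 *
 * The function below emits that pattern, using ARMRegisters::S0 as a scratch
 * register and choosing an addressing form based on the size of the offset.
 */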
void MacroAssemblerARM::load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
{
    ARMWord op2;

    ASSERT(address.scale >= 0 && address.scale <= 3);
    // The effective address is base + (index << scale) + offset; compute the
    // shifted index operand once up front.
    op2 = m_assembler.lsl(address.index, static_cast<int>(address.scale));
    if (address.offset >= 0 && address.offset + 0x2 <= 0xff) {
        // Small positive offset: both halfwords are reachable with the
        // 8-bit immediate (up) form of LDRH.
        m_assembler.add_r(ARMRegisters::S0, address.base, op2);
        m_assembler.ldrh_u(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset));
        m_assembler.ldrh_u(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset + 0x2));
    } else if (address.offset < 0 && address.offset >= -0xff) {
        // Small negative offset: use the subtracting (down) form of LDRH
        // with the magnitude of the offset.
        m_assembler.add_r(ARMRegisters::S0, address.base, op2);
        m_assembler.ldrh_d(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset));
        m_assembler.ldrh_d(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset - 0x2));
    } else {
        // The offset does not fit in the LDRH immediate field: materialize it
        // in the scratch register and use register-offset addressing instead.
        m_assembler.ldr_un_imm(ARMRegisters::S0, address.offset);
        m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, op2);
        m_assembler.ldrh_r(dest, address.base, ARMRegisters::S0);
        m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::OP2_IMM | 0x2);
        m_assembler.ldrh_r(ARMRegisters::S0, address.base, ARMRegisters::S0);
    }
    // Combine the two halves: the low halfword is already in dest, so shift
    // the high halfword (in S0) left by 16 bits and OR it in.
    m_assembler.orr_r(dest, dest, m_assembler.lsl(ARMRegisters::S0, 16));
}
#endif // CPU(ARMV5_OR_LOWER)

} // namespace JSC

#endif // ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)