]>
Commit | Line | Data |
---|---|---|
f9bf01c6 A |
1 | /* |
2 | * Copyright (C) 2009 University of Szeged | |
3 | * All rights reserved. | |
4 | * | |
5 | * Redistribution and use in source and binary forms, with or without | |
6 | * modification, are permitted provided that the following conditions | |
7 | * are met: | |
8 | * 1. Redistributions of source code must retain the above copyright | |
9 | * notice, this list of conditions and the following disclaimer. | |
10 | * 2. Redistributions in binary form must reproduce the above copyright | |
11 | * notice, this list of conditions and the following disclaimer in the | |
12 | * documentation and/or other materials provided with the distribution. | |
13 | * | |
14 | * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY | |
15 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |
16 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
17 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR | |
18 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
19 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
20 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
21 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY | |
22 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
23 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
24 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
25 | */ | |
26 | ||
27 | #include "config.h" | |
28 | ||
29 | #if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL) | |
30 | ||
31 | #include "MacroAssemblerARM.h" | |
32 | ||
33 | #if OS(LINUX) | |
34 | #include <sys/types.h> | |
35 | #include <sys/stat.h> | |
36 | #include <fcntl.h> | |
37 | #include <unistd.h> | |
38 | #include <elf.h> | |
39 | #include <asm/hwcap.h> | |
40 | #endif | |
41 | ||
42 | namespace JSC { | |
43 | ||
44 | static bool isVFPPresent() | |
45 | { | |
46 | #if OS(LINUX) | |
47 | int fd = open("/proc/self/auxv", O_RDONLY); | |
48 | if (fd > 0) { | |
49 | Elf32_auxv_t aux; | |
50 | while (read(fd, &aux, sizeof(Elf32_auxv_t))) { | |
51 | if (aux.a_type == AT_HWCAP) { | |
52 | close(fd); | |
53 | return aux.a_un.a_val & HWCAP_VFP; | |
54 | } | |
55 | } | |
56 | close(fd); | |
57 | } | |
58 | #endif | |
59 | ||
14957cd0 A |
60 | #if (COMPILER(RVCT) && defined(__TARGET_FPU_VFP)) || (COMPILER(GCC) && defined(__VFP_FP__)) |
61 | return true; | |
62 | #else | |
f9bf01c6 | 63 | return false; |
14957cd0 | 64 | #endif |
f9bf01c6 A |
65 | } |
66 | ||
// Cached once at static-initialization time so code emission can branch on
// VFP availability without re-probing /proc/self/auxv on every query.
const bool MacroAssemblerARM::s_isVFPPresent = isVFPPresent();
68 | ||
#if CPU(ARMV5_OR_LOWER)
/* On ARMv5 and below, natural alignment is required. A 32-bit load from a
 * potentially halfword-aligned address is therefore synthesized from two
 * aligned 16-bit loads: the low halfword lands in |dest|, the high halfword
 * is staged in the scratch register S0, and the two are merged at the end
 * with dest |= (S0 << 16). */
void MacroAssemblerARM::load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
{
    ARMWord op2;

    ASSERT(address.scale >= 0 && address.scale <= 3);
    // Encode the scaled index ("index LSL #scale") as the flexible operand.
    op2 = m_assembler.lsl(address.index, static_cast<int>(address.scale));

    if (address.offset >= 0 && address.offset + 0x2 <= 0xff) {
        // Both halfword offsets (offset and offset + 2) fit the 8-bit
        // positive immediate of the halfword transfer encoding, so compute
        // base + scaled index once into S0 and use immediate addressing.
        m_assembler.add(ARMRegisters::S0, address.base, op2);
        m_assembler.halfDtrUp(ARMAssembler::LoadUint16, dest, ARMRegisters::S0, ARMAssembler::getOp2Half(address.offset));
        m_assembler.halfDtrUp(ARMAssembler::LoadUint16, ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Half(address.offset + 0x2));
    } else if (address.offset < 0 && address.offset >= -0xff) {
        // Negative offsets within the 8-bit range use the down-indexing
        // (subtract-offset) form; magnitudes are passed as positives.
        m_assembler.add(ARMRegisters::S0, address.base, op2);
        m_assembler.halfDtrDown(ARMAssembler::LoadUint16, dest, ARMRegisters::S0, ARMAssembler::getOp2Half(-address.offset));
        m_assembler.halfDtrDown(ARMAssembler::LoadUint16, ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Half(-address.offset - 0x2));
    } else {
        // Offset out of immediate range: materialize (offset + scaled index)
        // in S0 and use the register-offset form for both halfword loads.
        m_assembler.moveImm(address.offset, ARMRegisters::S0);
        m_assembler.add(ARMRegisters::S0, ARMRegisters::S0, op2);
        m_assembler.halfDtrUpRegister(ARMAssembler::LoadUint16, dest, address.base, ARMRegisters::S0);
        // Advance the combined offset by 2 to address the high halfword.
        // NOTE: this clobbers S0 *after* its use as the first load's offset.
        m_assembler.add(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::Op2Immediate | 0x2);
        m_assembler.halfDtrUpRegister(ARMAssembler::LoadUint16, ARMRegisters::S0, address.base, ARMRegisters::S0);
    }
    // Merge the staged high halfword: dest |= S0 << 16.
    m_assembler.orr(dest, dest, m_assembler.lsl(ARMRegisters::S0, 16));
}
#endif
96 | ||
97 | } | |
98 | ||
99 | #endif // ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL) |