]>
Commit | Line | Data |
---|---|---|
f9bf01c6 A |
1 | /* |
2 | * Copyright (C) 2009 University of Szeged | |
3 | * All rights reserved. | |
4 | * | |
5 | * Redistribution and use in source and binary forms, with or without | |
6 | * modification, are permitted provided that the following conditions | |
7 | * are met: | |
8 | * 1. Redistributions of source code must retain the above copyright | |
9 | * notice, this list of conditions and the following disclaimer. | |
10 | * 2. Redistributions in binary form must reproduce the above copyright | |
11 | * notice, this list of conditions and the following disclaimer in the | |
12 | * documentation and/or other materials provided with the distribution. | |
13 | * | |
14 | * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY | |
15 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | |
16 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
17 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR | |
18 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, | |
19 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, | |
20 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR | |
21 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY | |
22 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |
23 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | |
24 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |
25 | */ | |
26 | ||
27 | #include "config.h" | |
28 | ||
29 | #if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL) | |
30 | ||
31 | #include "MacroAssemblerARM.h" | |
32 | ||
33 | #if OS(LINUX) | |
34 | #include <sys/types.h> | |
35 | #include <sys/stat.h> | |
36 | #include <fcntl.h> | |
37 | #include <unistd.h> | |
38 | #include <elf.h> | |
39 | #include <asm/hwcap.h> | |
40 | #endif | |
41 | ||
42 | namespace JSC { | |
43 | ||
44 | static bool isVFPPresent() | |
45 | { | |
46 | #if OS(LINUX) | |
47 | int fd = open("/proc/self/auxv", O_RDONLY); | |
48 | if (fd > 0) { | |
49 | Elf32_auxv_t aux; | |
50 | while (read(fd, &aux, sizeof(Elf32_auxv_t))) { | |
51 | if (aux.a_type == AT_HWCAP) { | |
52 | close(fd); | |
53 | return aux.a_un.a_val & HWCAP_VFP; | |
54 | } | |
55 | } | |
56 | close(fd); | |
57 | } | |
58 | #endif | |
59 | ||
60 | return false; | |
61 | } | |
62 | ||
// Probed once during static initialization and cached, so JIT code-generation
// paths can branch on VFP availability without re-reading /proc/self/auxv.
const bool MacroAssemblerARM::s_isVFPPresent = isVFPPresent();
64 | ||
65 | #if CPU(ARMV5_OR_LOWER) | |
66 | /* On ARMv5 and below, natural alignment is required. */ | |
// Emits code to load a 32-bit word from base + (index << scale) + offset as
// two halfword loads combined with an OR, because ARMv5 and below fault on
// unaligned word loads. NOTE(review): this presumes the effective address is
// at least halfword-aligned ("unaligned half words") — confirm with callers.
// Scratch register S0 holds intermediate addresses and the high halfword.
void MacroAssemblerARM::load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
{
    ARMWord op2;

    ASSERT(address.scale >= 0 && address.scale <= 3);
    // Fold the scaled index into an operand-2 shift: index << scale.
    op2 = m_assembler.lsl(address.index, static_cast<int>(address.scale));

    if (address.offset >= 0 && address.offset + 0x2 <= 0xff) {
        // Both halfword displacements (offset, offset + 2) fit the ldrh
        // unsigned-immediate range [0, 0xff]: compute base + scaled index
        // into S0, load the low halfword into dest, the high one into S0.
        m_assembler.add_r(ARMRegisters::S0, address.base, op2);
        m_assembler.ldrh_u(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset));
        m_assembler.ldrh_u(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(address.offset + 0x2));
    } else if (address.offset < 0 && address.offset >= -0xff) {
        // Negative offsets use the subtracting form (ldrh_d) with the
        // offset's magnitude. NOTE(review): for offset == -1 the second
        // displacement (-offset - 2) is negative; presumably callers never
        // pass such offsets — verify against call sites.
        m_assembler.add_r(ARMRegisters::S0, address.base, op2);
        m_assembler.ldrh_d(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset));
        m_assembler.ldrh_d(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset - 0x2));
    } else {
        // Offset outside the immediate range: materialize it in S0 (from a
        // literal load), add the scaled index, then use register-offset
        // halfword loads; bump S0 by 2 for the high halfword.
        m_assembler.ldr_un_imm(ARMRegisters::S0, address.offset);
        m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, op2);
        m_assembler.ldrh_r(dest, address.base, ARMRegisters::S0);
        m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, ARMAssembler::OP2_IMM | 0x2);
        m_assembler.ldrh_r(ARMRegisters::S0, address.base, ARMRegisters::S0);
    }
    // Combine the halves: dest = low | (high << 16) (little-endian order).
    m_assembler.orr_r(dest, dest, m_assembler.lsl(ARMRegisters::S0, 16));
}
91 | #endif | |
92 | ||
93 | } | |
94 | ||
95 | #endif // ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL) |