/*
- * Copyright (c) 2005-2007 Apple Inc. All rights reserved.
+ * Copyright (c) 2005-2010 Apple Inc. All rights reserved.
*
* @APPLE_LICENSE_HEADER_START@
*
* @APPLE_LICENSE_HEADER_END@
*/
+
+
+#if __arm__
+	.text
+	.align 2
+
+	@ Exercises ARM load relocations: pc-relative (PIC) loads through a
+	@ literal pool vs. loads through absolute addresses kept in the pool.
+	.globl _test_loads
+_test_loads:
+	@ PIC load of a
+	ldr r0, L100
+L0:
+	ldr r0, [pc, r0]
+
+	@ PIC load of c
+	ldr r0, L100+4
+L1:
+	ldr r0, [pc, r0]
+
+	@ sorta-absolute load of a
+	ldr r0, L100+8
+	ldr r0, [r0, #0]
+
+	@ sorta-absolute load of c
+	ldr r0, L100+12
+	ldr r0, [r0, #0]
+
+	@ sorta-absolute load of external
+	ldr r0, L100+16
+	ldr r0, [r0, #0]
+
+	@ PIC load of a + addend: not exercised (no code emitted for it here)
+	bx lr
+
+	@ Literal pool; the first two entries are pc-relative deltas whose
+	@ +8 accounts for the ARM pipeline bias of the loads at L0/L1.
+L100:
+	.long _a-(L0+8)
+	.long _c-(L1+8)
+	.long _a
+	.long _c
+	.long _ax
+
+	@ bl relocations to internal and external targets, with and
+	@ without large addends.
+_test_calls:
+	@ call internal
+	bl _test_branches
+
+	@ call internal + addend
+	bl _test_branches+0x19000
+
+	@ call external
+	bl _external
+
+	@ call external + addend
+	bl _external+0x19000
+
+
+	@ Conditional-branch (bne) relocations; the "call" wording in the
+	@ comments below is historical — these are branches, not calls.
+_test_branches:
+	@ call internal
+	bne _test_calls
+
+	@ call internal + addend
+	bne _test_calls+16
+
+	@ call internal - addend
+	bne _test_calls-16
+
+	@ call external
+	bne _external
+
+	@ call external + addend
+	bne _external+16
+
+	nop
+	bl 1f          @ forward branch to the numeric local label below
+1: nop
+
+
+	@ Weak (coalescable) definition, externally visible.
+	.globl _test_weak
+	.weak_definition _test_weak
+_test_weak:
+	nop
+	nop
+
+	@ Weak definition with hidden (private_extern) visibility.
+	.globl _test_hidden_weak
+	.private_extern _test_hidden_weak
+	.weak_definition _test_hidden_weak
+_test_hidden_weak:
+	nop
+	nop
+
+
+	@ Direct calls to a weak symbol, with and without addend.
+_test_weak_call:
+	bl _test_weak
+	bl _test_weak+4
+
+
+	@ Indirect call through a pc-relative pointer to the hidden weak symbol.
+	@ NOTE(review): the literal for this sequence sits at L101 below, but the
+	@ ldr references L3 (a label defined in the thumb code further down) —
+	@ looks like it may have meant L101; confirm the intended literal.
+_test_weak_hidden_pointer_call:
+	ldr r12,L3
+	add r12, pc, r12
+	nop
+	bx r12
+L101:
+	.long _test_hidden_weak - L101
+
+
+	.text
+	@ Pointer-difference relocations; "1b" resolves to the numeric
+	@ local label inside _test_branches above.
+_pointer_diffs:
+	.long _foo-1b
+	.long _foo+10-1b
+	.long _test_branches-1b
+	.long _test_branches+3-1b
+	.long (_test_branches - _test_loads) + -2097152
+	.long (_test_calls - _test_loads) + -2097152
+
+
+	.text
+	.code 32
+	@ ARM-mode stub functions; _arm3/_arm4 are weak and private_extern
+	@ to vary symbol visibility for the reference tests below.
+_arm1:
+	bx lr
+_arm2:
+	bx lr
+	.weak_definition _arm3
+	.globl _arm3
+	.private_extern _arm3
+_arm3:
+	bx lr
+	.weak_definition _arm4
+	.globl _arm4
+	.private_extern _arm4
+_arm4:
+	bx lr
+
+	@ Thumb-mode stubs with the same visibility variations.
+	.code 16
+	.thumb_func _thumb1
+_thumb1:
+	bx lr
+	.thumb_func _thumb2
+_thumb2:
+	bx lr
+	.weak_definition _thumb3
+	.globl _thumb3
+	.private_extern _thumb3
+	.thumb_func _thumb3
+_thumb3:
+	bx lr
+	.weak_definition _thumb4
+	.globl _thumb4
+	.private_extern _thumb4
+	.thumb_func _thumb4
+_thumb4:
+	bx lr
+
+	@ Thumb function taking pc-relative references to both thumb and
+	@ arm functions; the pool deltas use +4 for the Thumb pc bias.
+	.thumb_func _thumb_func_ref_test
+_thumb_func_ref_test:
+	push {r7, lr}
+	add r7, sp, #0
+	ldr r3, L6
+L2: add r3, pc
+	ldr r3, L7
+L3: add r3, pc
+	ldr r3, L8
+L4: add r3, pc
+	ldr r3, L9
+L5: add r3, pc
+	pop {r7, pc}
+	.align 2
+L6: .long _thumb1-(L2+4)
+L7: .long _thumb2-(L3+4)
+L7a:.long _thumb3-(L3+4)
+L7b:.long _thumb4-(L3+4)
+L8: .long _arm1-(L4+4)
+L9: .long _arm2-(L5+4)
+L9a:.long _arm3-(L5+4)
+L9b:.long _arm4-(L5+4)
+
+	@ ARM-mode version of the function-reference test; the pool deltas
+	@ use +8 for the ARM pc bias.
+	.code 32
+	.align 2
+_arm_func_ref_test:
+	push {r7, lr}
+	add r7, sp, #0
+	ldr r3, L16
+L12:add r3, pc
+	ldr r3, L17
+L13:add r3, pc
+	ldr r3, L18
+L14:add r3, pc
+	ldr r3, L19
+L15:add r3, pc
+	pop {r7, pc}
+	.align 2
+L16: .long _thumb1-(L12+8)
+L17: .long _thumb2-(L13+8)
+L17a: .long _thumb3-(L13+8)
+L17b: .long _thumb4-(L13+8)
+L18: .long _arm1-(L14+8)
+L19: .long _arm2-(L15+8)
+L19a: .long _arm3-(L15+8)
+L19b: .long _arm4-(L15+8)
+
+	.section __DATA,__const
+	@ vtable-style array of function pointers mixing thumb and arm targets
+_myVTable:
+	.long _thumb1
+	.long _thumb2
+	.long _thumb3
+	.long _arm1
+	.long _arm2
+
+#if __ARM_ARCH_7A__
+	.text
+	.align 2
+	@ movw/movt halves via :lower16:/:upper16:, first absolute, then
+	@ relative to Lpicbase, with assorted addends.
+_arm16tests:
+	movw r0, :lower16:_datahilo16
+	movt r0, :upper16:_datahilo16
+	movw r0, :lower16:_datahilo16+4
+	movt r0, :upper16:_datahilo16+4
+	movw r0, :lower16:_datahilo16alt
+	movt r0, :upper16:_datahilo16alt
+	movw r0, :lower16:_datahilo16alt+61440
+	movt r0, :upper16:_datahilo16alt+61440
+	movw r0, :lower16:_datahilo16alt+2048
+	movt r0, :upper16:_datahilo16alt+2048
+	movw r0, :lower16:_datahilo16alt+1792
+	movt r0, :upper16:_datahilo16alt+1792
+	movw r0, :lower16:_datahilo16alt+165
+	movt r0, :upper16:_datahilo16alt+165
+Lpicbase:
+	movw r0, :lower16:_datahilo16 - Lpicbase
+	movt r0, :upper16:_datahilo16 - Lpicbase
+	movw r0, :lower16:_datahilo16+4 - Lpicbase
+	movt r0, :upper16:_datahilo16+4 - Lpicbase
+	movw r0, :lower16:_datahilo16alt - Lpicbase
+	movt r0, :upper16:_datahilo16alt - Lpicbase
+	movw r0, :lower16:_datahilo16alt+61440 - Lpicbase
+	movt r0, :upper16:_datahilo16alt+61440 - Lpicbase
+	movw r0, :lower16:_datahilo16alt+2048 - Lpicbase
+	movt r0, :upper16:_datahilo16alt+2048 - Lpicbase
+	movw r0, :lower16:_datahilo16alt+1792 - Lpicbase
+	movt r0, :upper16:_datahilo16alt+1792 - Lpicbase
+	movw r0, :lower16:_datahilo16alt+165 - Lpicbase
+	movt r0, :upper16:_datahilo16alt+165 - Lpicbase
+	bx lr
+
+	@ Same :lower16:/:upper16: movw/movt tests, Thumb encoding.
+	.code 16
+	.thumb_func _thumb16tests
+_thumb16tests:
+	movw r0, :lower16:_datahilo16
+	movt r0, :upper16:_datahilo16
+	movw r0, :lower16:_datahilo16+4
+	movt r0, :upper16:_datahilo16+4
+	movw r0, :lower16:_datahilo16alt
+	movt r0, :upper16:_datahilo16alt
+	movw r0, :lower16:_datahilo16alt+61440
+	movt r0, :upper16:_datahilo16alt+61440
+	movw r0, :lower16:_datahilo16alt+2048
+	movt r0, :upper16:_datahilo16alt+2048
+	movw r0, :lower16:_datahilo16alt+1792
+	movt r0, :upper16:_datahilo16alt+1792
+	movw r0, :lower16:_datahilo16alt+165
+	movt r0, :upper16:_datahilo16alt+165
+Lpicbase2:
+	movw r0, :lower16:_datahilo16 - Lpicbase2
+	movt r0, :upper16:_datahilo16 - Lpicbase2
+	movw r0, :lower16:_datahilo16+4 - Lpicbase2
+	movt r0, :upper16:_datahilo16+4 - Lpicbase2
+	movw r0, :lower16:_datahilo16alt - Lpicbase2
+	movt r0, :upper16:_datahilo16alt - Lpicbase2
+	movw r0, :lower16:_datahilo16alt+61440 - Lpicbase2
+	movt r0, :upper16:_datahilo16alt+61440 - Lpicbase2
+	movw r0, :lower16:_datahilo16alt+2048 - Lpicbase2
+	movt r0, :upper16:_datahilo16alt+2048 - Lpicbase2
+	movw r0, :lower16:_datahilo16alt+1792 - Lpicbase2
+	movt r0, :upper16:_datahilo16alt+1792 - Lpicbase2
+	movw r0, :lower16:_datahilo16alt+165 - Lpicbase2
+	movt r0, :upper16:_datahilo16alt+165 - Lpicbase2
+	bx lr
+
+	.data
+	@ data targets for the movw/movt tests above
+_datahilo16: .long 0
+_datahilo16alt: .long 0
+
+#endif
+
+#endif
+
#if __ppc__ || __ppc64__
	.text
	lis r2,ha16(_a+0x19000)
	lwz r2,lo16(_a+0x19000)(r2)
+	; lea of a + addend
+	lis r2,ha16(_a+0x19000)
+	addi r2,r2,lo16(_a+0x19000)
+
+	; alt lea of a + addend (hi16/ori form instead of ha16/addi)
+	lis r2,hi16(_a+0x19000)
+	ori r2,r2,lo16(_a+0x19000)
+
	; absolute load of external + addend
	lis r2,ha16(_ax+0x19000)
	lwz r2,lo16(_ax+0x19000)(r2)
	lis r2,ha16(_a+0x09000)
	lwz r2,lo16(_a+0x09000)(r2)
+	; lea of a + addend
+	lis r2,ha16(_a+0x09000)
+	addi r2,r2,lo16(_a+0x09000)
+
+	; alt lea of a + addend (hi16/ori form instead of ha16/addi)
+	lis r2,hi16(_a+0x09000)
+	ori r2,r2,lo16(_a+0x09000)
+
	; absolute load of external + addend
	lis r2,ha16(_ax+0x09000)
	lwz r2,lo16(_ax+0x09000)(r2)
	; call external + addend
	bne _external+16
+
+	; weak definition and calls to it (parallels the arm/i386 sections)
+	.globl _test_weak
+	.weak_definition _test_weak
+_test_weak:
+	nop
+	nop
+
+_test_weak_call:
+	bl _test_weak
+	bl _test_weak+4
+
#endif
#if __i386__
	.text
	.align 2
-
+
+# local (non-external) data used by the negative-addend loads below
+Ltest_data:
+	.long 1
+	.long 2
+	.long 3
+
	.globl _test_loads
_test_loads:
	pushl %ebp
	# absolute lea of external + addend
	leal _ax+0x1900, %eax
+	# absolute load of Ltest_data with negative addend and local label
+	movl Ltest_data-16(%edi),%eax
+	movq Ltest_data-16(%edi),%mm4
+
	ret
	# call internal + addend
	call _test_branches+0x19000
+	# 16-bit call internal
+	callw _test_branches
+
+	# 16-bit call internal + addend
+	callw _test_branches+13
+
	# call external
	call _external
	movl _foo+10-1b(%eax),%esi
	movl _test_branches-1b(%eax),%esi
	movl _test_branches+3-1b(%eax),%esi
+	cmpl $(( (_test_branches - _test_loads) + -2097152 )),(%esp)
+	cmpl $(( (_test_calls - _test_loads) + -2097152 )),(%esp)
+
_word_relocs:
	callw _pointer_diffs
+# nonsense loops that force byte-displacement branch relocations
+_byte_relocs:
+	mov $100, %ecx
+c_1:
+	loop c_1
+	mov $100, %ecx
+c_2:
+	sub $(1), %ecx
+	jcxz c_2
+
+	# weak definition and calls to it (parallels the other arch sections)
+	.globl _test_weak
+	.weak_definition _test_weak
+_test_weak:
+	nop
+	nop
+
+_test_weak_call:
+	call _test_weak
+	call _test_weak+1
+
#endif
	# call external + addend
	jne _external+16
+
+_byte_relocs:
+	# nonsense loop that creates byte branch relocation
+	mov $100, %ecx
+c_1:
+	loop _byte_relocs
+	nop
+
+	# weak definition and calls to it (parallels the other arch sections)
+	.globl _test_weak
+	.weak_definition _test_weak
+_test_weak:
+	nop
+	nop
+
+_test_weak_call:
+	call _test_weak
+	call _test_weak+1
+
#endif
# test that pointer-diff relocs are preserved
	.text
-_test_diffs:
	.align 2
+_test_diffs:
Llocal2:
	.long 0
	.long Llocal2-_test_branches
	.long _test_branches - .
	.long _test_branches - . + 8
	.long _test_branches - . - 8
+	.long 0
+	.long 0
#if __ppc64__
	.quad Llocal2-_test_branches
#endif
_foo: nop
+Lfoo: nop
	.align 2
_distance_from_foo:
	.long _foo - _distance_to_here
	.long _foo - _distance_to_here - 4
	.long _foo - _distance_to_here - 12
+	# same differences, but against a local (assembler-resolved) label
+	.long Lfoo - _distance_to_here
+Ltohere:
+	.long Lfoo - Ltohere
+	.long Lfoo - Ltohere - 4
	.long 0
	.quad _test_branches - .
	.quad _test_branches - L1
	.quad L1 - _prev
+	.quad _prev+100 - _test_branches
+	# tests support for 32-bit absolute pointers
+	.long _prev
+	.long L1
# the following generates: _foo cannot be undefined in a subtraction expression
# but it should be ok (it will be a linker error if _foo and _bar are not in same linkage unit)
	.long 0
_b:
-#if __ppc__ || __i386__
+#if __ppc__ || __i386__ || __arm__
	.long _test_calls
	.long _test_calls+16
	.long _external
	.long _external+16
+	.long _test_weak
+	.long _test_weak+16
#elif __ppc64__ || __x86_64__
	.quad _test_calls
	.quad _test_calls+16
	.quad _external
	.quad _external+16
+	.quad _test_weak
+	.quad _test_weak+16
#endif
# test that reloc sizes are the same