/*
 * Copyright (c) 2003-2006 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <machine/cpu_capabilities.h>

	.text
	.align	2, 0x90
	.globl	__commpage_set_timestamp
/* extern void	_commpage_set_timestamp(uint64_t abstime, uint64_t secs); */
__commpage_set_timestamp:
	push	%ebp
	mov	%esp,%ebp

	mov	_commPagePtr32,%ecx
	sub	$ _COMM_PAGE32_BASE_ADDRESS,%ecx
	mov	_commPagePtr64,%edx			/* point to 64-bit commpage too */
	mov	%edx,%eax
	sub	$ _COMM_PAGE32_START_ADDRESS,%edx	/* because kernel is built 32-bit */
	test	%eax,%eax
	cmovz	%ecx,%edx				/* if no 64-bit commpage, point to 32 with both */

	movl	$0,_COMM_PAGE_TIMEENABLE(%ecx)		/* mark timestamp invalid while we update it */
	movl	$0,_COMM_PAGE_TIMEENABLE(%edx)

	mov	8(%ebp),%eax				/* abstime == 0? (low | high) */
	or	12(%ebp),%eax
	je	1f					/* yes, leave the timestamp disabled */

	mov	8(%ebp),%eax				/* store new abstime base, low then high */
	mov	%eax,_COMM_PAGE_TIMEBASE(%ecx)
	mov	%eax,_COMM_PAGE_TIMEBASE(%edx)
	mov	12(%ebp),%eax
	mov	%eax,_COMM_PAGE_TIMEBASE+4(%ecx)
	mov	%eax,_COMM_PAGE_TIMEBASE+4(%edx)

	mov	16(%ebp),%eax				/* store seconds sampled at that base, low then high */
	mov	%eax,_COMM_PAGE_TIMESTAMP(%ecx)
	mov	%eax,_COMM_PAGE_TIMESTAMP(%edx)
	mov	20(%ebp),%eax
	mov	%eax,_COMM_PAGE_TIMESTAMP+4(%ecx)
	mov	%eax,_COMM_PAGE_TIMESTAMP+4(%edx)

	movl	$1,_COMM_PAGE_TIMEENABLE(%ecx)		/* mark timestamp valid again */
	movl	$1,_COMM_PAGE_TIMEENABLE(%edx)
1:
	pop	%ebp
	ret

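/*
 * _COMM_PAGE_TIMEENABLE acts as a validity flag for the pair written above:
 * the kernel clears it, rewrites TIMEBASE (the abstime at which TIMESTAMP's
 * seconds value was sampled), then sets it again.  User-mode readers such as
 * the commpage gettimeofday are expected to check the flag around their reads
 * and retry if it is clear or changes.  A hedged C sketch of such a reader --
 * the struct and function names are illustrative only, not the real commpage
 * layout or accessors:
 *
 *	#include <stdbool.h>
 *	#include <stdint.h>
 *
 *	typedef struct {				// hypothetical view of the three fields
 *		volatile uint64_t timebase;		// _COMM_PAGE_TIMEBASE
 *		volatile uint64_t timestamp;		// _COMM_PAGE_TIMESTAMP (seconds)
 *		volatile uint32_t timeenable;		// _COMM_PAGE_TIMEENABLE
 *	} commpage_time_view;
 *
 *	static bool
 *	read_time_snapshot(const commpage_time_view *v, uint64_t *base, uint64_t *secs)
 *	{
 *		if (v->timeenable == 0)
 *			return false;			// disabled or mid-update: caller retries
 *		*base = v->timebase;
 *		*secs = v->timestamp;
 *		return v->timeenable != 0;		// re-check in case an update raced us
 *	}
 */
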
	.text
	.align	2, 0x90
	.globl	_commpage_set_nanotime
/* extern void	commpage_set_nanotime(uint64_t tsc_base, uint64_t ns_base, uint32_t scale, uint32_t shift); */
_commpage_set_nanotime:
	push	%ebp
	mov	%esp,%ebp

	mov	_commPagePtr32,%ecx
	testl	%ecx,%ecx				/* commpage not yet set up? */
	je	1f

	sub	$(_COMM_PAGE_BASE_ADDRESS),%ecx
	mov	_commPagePtr64,%edx			/* point to 64-bit commpage too */
	mov	%edx,%eax
	sub	$ _COMM_PAGE32_START_ADDRESS,%edx	/* because kernel is built 32-bit */
	test	%eax,%eax
	cmovz	%ecx,%edx				/* if no 64-bit commpage, point to 32 with both */

	mov	8(%ebp),%eax				/* store tsc_base, low then high */
	mov	%eax,_COMM_PAGE_NT_TSC_BASE(%ecx)
	mov	%eax,_COMM_PAGE_NT_TSC_BASE(%edx)
	mov	12(%ebp),%eax
	mov	%eax,_COMM_PAGE_NT_TSC_BASE+4(%ecx)
	mov	%eax,_COMM_PAGE_NT_TSC_BASE+4(%edx)

	mov	24(%ebp),%eax				/* store scale */
	mov	%eax,_COMM_PAGE_NT_SCALE(%ecx)
	mov	%eax,_COMM_PAGE_NT_SCALE(%edx)

	mov	28(%ebp),%eax				/* store shift */
	mov	%eax,_COMM_PAGE_NT_SHIFT(%ecx)
	mov	%eax,_COMM_PAGE_NT_SHIFT(%edx)

	mov	16(%ebp),%eax				/* store ns_base last, low then high */
	mov	%eax,_COMM_PAGE_NT_NS_BASE(%ecx)
	mov	%eax,_COMM_PAGE_NT_NS_BASE(%edx)
	mov	20(%ebp),%eax
	mov	%eax,_COMM_PAGE_NT_NS_BASE+4(%ecx)
	mov	%eax,_COMM_PAGE_NT_NS_BASE+4(%edx)
1:
	pop	%ebp
	ret

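/*
 * The four values stored above parameterize the user-visible nanotime
 * conversion; the authoritative consumer is the user-mode nanotime commpage
 * routine.  Roughly, and hedged, the conversion is:
 *
 *	ns = ns_base + ((((tsc - tsc_base) << shift) * scale) >> 32)
 *
 * A C sketch of that arithmetic, written with a 64x32-bit multiply so the
 * intermediate never needs 128-bit types (names are illustrative):
 *
 *	#include <stdint.h>
 *
 *	static uint64_t
 *	nanotime_from_tsc(uint64_t tsc, uint64_t tsc_base, uint64_t ns_base,
 *			  uint32_t scale, uint32_t shift)
 *	{
 *		uint64_t delta = (tsc - tsc_base) << shift;	// shift compensates for slow TSCs (commonly 0)
 *		uint64_t lo = (delta & 0xffffffffULL) * scale;	// low 32 bits of delta x scale
 *		uint64_t hi = (delta >> 32) * scale;		// high 32 bits of delta x scale
 *		return ns_base + hi + (lo >> 32);		// == ns_base + ((delta * scale) >> 32)
 *	}
 */
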
#define	CPN(routine)	_commpage_ ## routine

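/*
 * Each CPN(x) entry below is a pointer to a commpage_descriptor record that is
 * emitted next to the routine's code in the per-routine source files; the
 * descriptor tells commpage.c where the code lives, how long it is, which
 * commpage address it belongs at, and which _cpu_capabilities bits it needs.
 * A hedged C approximation of that record -- field names and types are from
 * memory and may not match commpage.h exactly:
 *
 *	typedef struct commpage_descriptor {
 *		void	*code_address;		// kernel address of the routine's code
 *		long	 code_length;		// size of the code, in bytes
 *		long	 commpage_address;	// _COMM_PAGE_* address to copy it to
 *		long	 musthave;		// _cpu_capabilities bits that must be set
 *		long	 canthave;		// _cpu_capabilities bits that must be clear
 *	} commpage_descriptor;
 */
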
/* pointers to the 32-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
	.const_data
	.align	2
	.globl	_commpage_32_routines
_commpage_32_routines:
	.long	CPN(compare_and_swap32_mp)
	.long	CPN(compare_and_swap32_up)
	.long	CPN(compare_and_swap64_mp)
	.long	CPN(compare_and_swap64_up)
	.long	CPN(atomic_add32_mp)
	.long	CPN(atomic_add32_up)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp)
	.long	CPN(spin_lock_try_up)
	.long	CPN(spin_lock_mp)
	.long	CPN(spin_lock_up)
	.long	CPN(spin_unlock)
	.long	CPN(pthread_getspecific)
	.long	CPN(gettimeofday)
	.long	CPN(sys_flush_dcache)
	.long	CPN(sys_icache_invalidate)
	.long	CPN(pthread_self)
//	.long	CPN(relinquish)
	.long	CPN(bit_test_and_set_mp)
	.long	CPN(bit_test_and_set_up)
	.long	CPN(bit_test_and_clear_mp)
	.long	CPN(bit_test_and_clear_up)
	.long	CPN(bzero_scalar)
	.long	CPN(bzero_sse3)
	.long	CPN(bcopy_scalar)
	.long	CPN(bcopy_sse3)
	.long	CPN(bcopy_sse4)
	.long	CPN(old_nanotime)
	.long	CPN(memset_pattern_sse3)
	.long	CPN(longcopy_sse4)
	.long	CPN(nanotime)
	.long	0


/* pointers to the 64-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
	.const_data
	.align	2
	.globl	_commpage_64_routines
_commpage_64_routines:
	.long	CPN(compare_and_swap32_mp_64)
	.long	CPN(compare_and_swap32_up_64)
	.long	CPN(compare_and_swap64_mp_64)
	.long	CPN(compare_and_swap64_up_64)
	.long	CPN(atomic_add32_mp_64)
	.long	CPN(atomic_add32_up_64)
	.long	CPN(atomic_add64_mp_64)
	.long	CPN(atomic_add64_up_64)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp_64)
	.long	CPN(spin_lock_try_up_64)
	.long	CPN(spin_lock_mp_64)
	.long	CPN(spin_lock_up_64)
	.long	CPN(spin_unlock_64)
	.long	CPN(pthread_getspecific_64)
	.long	CPN(gettimeofday_64)
	.long	CPN(sys_flush_dcache_64)
	.long	CPN(sys_icache_invalidate)	/* same routine as 32-bit version, just a "ret" */
	.long	CPN(pthread_self_64)
	.long	CPN(bit_test_and_set_mp_64)
	.long	CPN(bit_test_and_set_up_64)
	.long	CPN(bit_test_and_clear_mp_64)
	.long	CPN(bit_test_and_clear_up_64)
	.long	CPN(bzero_sse3_64)
	.long	CPN(bcopy_sse4_64)
	.long	CPN(old_nanotime_64)
	.long	CPN(memset_pattern_sse3_64)
	.long	CPN(longcopy_sse4_64)
	.long	CPN(nanotime_64)
	.long	0
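
/*
 * Both tables are NULL-terminated arrays of descriptor pointers, and each must
 * stay sorted by the commpage address its descriptors target, presumably
 * because the population code lays the routines down in ascending address
 * order.  A hedged sketch of how commpage.c might walk one of these tables at
 * boot -- the helper names here are illustrative, not the exact kernel
 * interfaces:
 *
 *	// hypothetical: copy one routine into the commpage if the CPU qualifies
 *	extern void commpage_stuff_routine(commpage_descriptor *d);
 *
 *	static void
 *	commpage_stuff_all(commpage_descriptor **table)
 *	{
 *		commpage_descriptor **d;
 *
 *		for (d = table; *d != NULL; d++)
 *			commpage_stuff_routine(*d);
 *	}
 *
 *	// e.g.: commpage_stuff_all(commpage_32_routines);
 */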