/*
 * Copyright (c) 2003-2006 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
#include <machine/cpu_capabilities.h>
	.globl	__commpage_set_timestamp
/* extern void	_commpage_set_timestamp(uint64_t abstime, uint64_t secs); */
__commpage_set_timestamp:
	push	%ebp
	mov	%esp,%ebp

	mov	_commPagePtr32,%ecx
	sub	$ _COMM_PAGE32_BASE_ADDRESS,%ecx
	mov	_commPagePtr64,%edx		/* point to 64-bit commpage too */
	mov	%edx,%eax
	sub	$ _COMM_PAGE32_START_ADDRESS,%edx /* because kernel is built 32-bit */
	test	%eax,%eax
	cmovz	%ecx,%edx			/* if no 64-bit commpage, point to 32 with both */

	movl	$0,_COMM_PAGE_TIMEENABLE(%ecx)	/* mark timestamp invalid while we update it */
	movl	$0,_COMM_PAGE_TIMEENABLE(%edx)

	mov	8(%ebp),%eax			/* abstime, low half */
	mov	%eax,_COMM_PAGE_TIMEBASE(%ecx)
	mov	%eax,_COMM_PAGE_TIMEBASE(%edx)
	mov	12(%ebp),%eax			/* abstime, high half */
	mov	%eax,_COMM_PAGE_TIMEBASE+4(%ecx)
	mov	%eax,_COMM_PAGE_TIMEBASE+4(%edx)

	mov	16(%ebp),%eax			/* secs, low half */
	mov	%eax,_COMM_PAGE_TIMESTAMP(%ecx)
	mov	%eax,_COMM_PAGE_TIMESTAMP(%edx)
	mov	20(%ebp),%eax			/* secs, high half */
	mov	%eax,_COMM_PAGE_TIMESTAMP+4(%ecx)
	mov	%eax,_COMM_PAGE_TIMESTAMP+4(%edx)

	movl	$1,_COMM_PAGE_TIMEENABLE(%ecx)	/* mark timestamp valid again */
	movl	$1,_COMM_PAGE_TIMEENABLE(%edx)

	pop	%ebp
	ret
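
/*
 * Why _COMM_PAGE_TIMEENABLE is cleared first and set last: user-mode readers
 * treat it as a validity flag around the TIMEBASE/TIMESTAMP pair.  The sketch
 * below is a hedged, simplified C reader (not part of this file; the struct
 * and function names are hypothetical stand-ins for the _COMM_PAGE_* fields):
 *
 *	#include <stdint.h>
 *
 *	struct commpage_timestamp {		// hypothetical layout
 *		volatile uint64_t timebase;	// _COMM_PAGE_TIMEBASE
 *		volatile uint64_t timestamp;	// _COMM_PAGE_TIMESTAMP (seconds)
 *		volatile uint32_t timeenable;	// _COMM_PAGE_TIMEENABLE
 *	};
 *
 *	// Returns 0 with a snapshot of (abstime, secs), or -1 if the timestamp
 *	// is currently disabled and the caller should fall back to a syscall.
 *	static int commpage_read_timestamp(const struct commpage_timestamp *cp,
 *					   uint64_t *abstime, uint64_t *secs)
 *	{
 *		do {
 *			if (cp->timeenable == 0)
 *				return -1;
 *			*abstime = cp->timebase;
 *			*secs	 = cp->timestamp;
 *		} while (cp->timeenable == 0);	// cleared mid-read: retry
 *		return 0;
 *	}
 */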
	.globl	_commpage_set_nanotime
/* extern void	commpage_set_nanotime(uint64_t tsc_base, uint64_t ns_base, uint32_t scale, uint32_t shift); */
_commpage_set_nanotime:
	push	%ebp
	mov	%esp,%ebp

	mov	_commPagePtr32,%ecx
	sub	$(_COMM_PAGE_BASE_ADDRESS),%ecx
	mov	_commPagePtr64,%edx		/* point to 64-bit commpage too */
	mov	%edx,%eax
	sub	$ _COMM_PAGE32_START_ADDRESS,%edx /* because kernel is built 32-bit */
	test	%eax,%eax
	cmovz	%ecx,%edx			/* if no 64-bit commpage, point to 32 with both */

	mov	8(%ebp),%eax			/* tsc_base, low half */
	mov	%eax,_COMM_PAGE_NT_TSC_BASE(%ecx)
	mov	%eax,_COMM_PAGE_NT_TSC_BASE(%edx)
	mov	12(%ebp),%eax			/* tsc_base, high half */
	mov	%eax,_COMM_PAGE_NT_TSC_BASE+4(%ecx)
	mov	%eax,_COMM_PAGE_NT_TSC_BASE+4(%edx)

	mov	24(%ebp),%eax			/* scale */
	mov	%eax,_COMM_PAGE_NT_SCALE(%ecx)
	mov	%eax,_COMM_PAGE_NT_SCALE(%edx)

	mov	28(%ebp),%eax			/* shift */
	mov	%eax,_COMM_PAGE_NT_SHIFT(%ecx)
	mov	%eax,_COMM_PAGE_NT_SHIFT(%edx)

	mov	16(%ebp),%eax			/* ns_base, low half */
	mov	%eax,_COMM_PAGE_NT_NS_BASE(%ecx)
	mov	%eax,_COMM_PAGE_NT_NS_BASE(%edx)
	mov	20(%ebp),%eax			/* ns_base, high half */
	mov	%eax,_COMM_PAGE_NT_NS_BASE+4(%ecx)
	mov	%eax,_COMM_PAGE_NT_NS_BASE+4(%edx)

	pop	%ebp
	ret
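
/*
 * What the four values written above are for: the commpage nanotime routines
 * convert the CPU timestamp counter to nanoseconds without a syscall.  A
 * hedged C approximation is sketched below (the authoritative computation is
 * the commpage assembly, which keeps a wider intermediate product and whose
 * use of 'shift' varies by release; names here are hypothetical):
 *
 *	#include <stdint.h>
 *
 *	struct nt_params {		// hypothetical mirror of the NT_* fields
 *		uint64_t tsc_base;	// _COMM_PAGE_NT_TSC_BASE
 *		uint64_t ns_base;	// _COMM_PAGE_NT_NS_BASE
 *		uint32_t scale;		// _COMM_PAGE_NT_SCALE: 2^32 * ns per TSC tick
 *		uint32_t shift;		// _COMM_PAGE_NT_SHIFT: 0 on most CPUs
 *	};
 *
 *	static uint64_t approx_nanotime(const struct nt_params *nt, uint64_t tsc)
 *	{
 *		uint64_t delta = (tsc - nt->tsc_base) << nt->shift;
 *		// 64x32 multiply truncated to 64 bits here; the assembly keeps
 *		// the full product before shifting right by 32.
 *		return nt->ns_base + ((delta * (uint64_t)nt->scale) >> 32);
 *	}
 */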
#define	CPN(routine)	_commpage_ ## routine

/* pointers to the 32-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */

	.globl	_commpage_32_routines
_commpage_32_routines:
	.long	CPN(compare_and_swap32_mp)
	.long	CPN(compare_and_swap32_up)
	.long	CPN(compare_and_swap64_mp)
	.long	CPN(compare_and_swap64_up)
	.long	CPN(atomic_add32_mp)
	.long	CPN(atomic_add32_up)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp)
	.long	CPN(spin_lock_try_up)
	.long	CPN(spin_lock_mp)
	.long	CPN(spin_lock_up)
	.long	CPN(spin_unlock)
	.long	CPN(pthread_getspecific)
	.long	CPN(gettimeofday)
	.long	CPN(sys_flush_dcache)
	.long	CPN(sys_icache_invalidate)
	.long	CPN(pthread_self)
//	.long	CPN(relinquish)
	.long	CPN(bit_test_and_set_mp)
	.long	CPN(bit_test_and_set_up)
	.long	CPN(bit_test_and_clear_mp)
	.long	CPN(bit_test_and_clear_up)
	.long	CPN(bzero_scalar)
	.long	CPN(bzero_sse3)
	.long	CPN(bcopy_scalar)
	.long	CPN(bcopy_sse3)
	.long	CPN(bcopy_sse4)
	.long	CPN(old_nanotime)
	.long	CPN(memset_pattern_sse3)
	.long	CPN(longcopy_sse4)
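
/*
 * How this table is consumed (an illustrative sketch only; the real
 * commpage_descriptor type and population code live in commpage.h and
 * commpage.c): the kernel walks the list in order and copies each routine to
 * its fixed offset in the commpage, which is why the entries must stay sorted
 * by commpage address.  Field and function names below are assumptions.
 *
 *	#include <stdint.h>
 *	#include <string.h>
 *
 *	typedef struct {			// hypothetical, simplified descriptor
 *		const void *code_address;	// routine text in the kernel
 *		uint32_t    code_length;	// bytes to copy
 *		uint32_t    commpage_address;	// fixed user-visible address
 *	} commpage_descriptor;
 *
 *	// 'list' is assumed to be NULL-terminated (terminator not shown here).
 *	static void populate(char *page_base, uint32_t page_start,
 *			     commpage_descriptor **list)
 *	{
 *		for (commpage_descriptor **p = list; *p != NULL; p++)
 *			memcpy(page_base + ((*p)->commpage_address - page_start),
 *			       (*p)->code_address, (*p)->code_length);
 *	}
 */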
/* pointers to the 64-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */

	.globl	_commpage_64_routines
_commpage_64_routines:
	.long	CPN(compare_and_swap32_mp_64)
	.long	CPN(compare_and_swap32_up_64)
	.long	CPN(compare_and_swap64_mp_64)
	.long	CPN(compare_and_swap64_up_64)
	.long	CPN(atomic_add32_mp_64)
	.long	CPN(atomic_add32_up_64)
	.long	CPN(atomic_add64_mp_64)
	.long	CPN(atomic_add64_up_64)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp_64)
	.long	CPN(spin_lock_try_up_64)
	.long	CPN(spin_lock_mp_64)
	.long	CPN(spin_lock_up_64)
	.long	CPN(spin_unlock_64)
	.long	CPN(pthread_getspecific_64)
	.long	CPN(gettimeofday_64)
	.long	CPN(sys_flush_dcache_64)
	.long	CPN(sys_icache_invalidate)	/* same routine as 32-bit version, just a "ret" */
	.long	CPN(pthread_self_64)
	.long	CPN(bit_test_and_set_mp_64)
	.long	CPN(bit_test_and_set_up_64)
	.long	CPN(bit_test_and_clear_mp_64)
	.long	CPN(bit_test_and_clear_up_64)
	.long	CPN(bzero_sse3_64)
	.long	CPN(bcopy_sse4_64)
	.long	CPN(old_nanotime_64)
	.long	CPN(memset_pattern_sse3_64)
	.long	CPN(longcopy_sse4_64)
	.long	CPN(nanotime_64)
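
/*
 * How user programs reach these routines (a hedged illustration, not code
 * from this file): each descriptor places its routine at a fixed address
 * published in <machine/cpu_capabilities.h>, and libSystem calls that address
 * directly.  Assuming the _COMM_PAGE_ABSOLUTE_TIME constant from that header:
 *
 *	#include <stdint.h>
 *	#include <machine/cpu_capabilities.h>
 *
 *	static uint64_t commpage_mach_absolute_time(void)
 *	{
 *		typedef uint64_t (*mat_fn)(void);
 *		return ((mat_fn)(uintptr_t)_COMM_PAGE_ABSOLUTE_TIME)();
 *	}
 */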