]> git.saurik.com Git - apple/xnu.git/blame_incremental - osfmk/i386/commpage/commpage_asm.s
xnu-792.10.96.tar.gz
[apple/xnu.git] / osfmk / i386 / commpage / commpage_asm.s
... / ...
CommitLineData
1/*
2 * Copyright (c) 2003-2006 Apple Computer, Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * The contents of this file constitute Original Code as defined in and
7 * are subject to the Apple Public Source License Version 1.1 (the
8 * "License"). You may not use this file except in compliance with the
9 * License. Please obtain a copy of the License at
10 * http://www.apple.com/publicsource and read it before using this file.
11 *
12 * This Original Code and all software distributed under the License are
13 * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
14 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
15 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT. Please see the
17 * License for the specific language governing rights and limitations
18 * under the License.
19 *
20 * @APPLE_LICENSE_HEADER_END@
21 */
22
23#include <machine/cpu_capabilities.h>
24
 25 .text
 26 .align 2, 0x90
 27 .globl __commpage_set_timestamp
/*
 * Publish a new gettimeofday() timestamp into BOTH commpages (the 32-bit
 * one and, when present, the 64-bit one).  _COMM_PAGE_TIMEENABLE acts as
 * a valid flag: it is cleared before the two 64-bit fields are rewritten
 * and only set again once both are complete, so user-mode readers never
 * see a torn snapshot.  Passing abstime == 0 disables the timestamp
 * (the flag is left clear).
 *
 * i386 cdecl stack frame after the prologue (64-bit args split lo/hi):
 *    8(%ebp)/12(%ebp) = abstime lo/hi
 *   16(%ebp)/20(%ebp) = secs    lo/hi
 * Clobbers: %eax, %ecx, %edx, flags.
 *
 * NOTE(review): unlike _commpage_set_nanotime below, _commPagePtr32 is
 * not NULL-checked here — presumably this is only called after the
 * 32-bit commpage is initialized; confirm against callers.
 */
 28/* extern void _commpage_set_timestamp(uint64_t abstime, uint64_t secs); */
 29__commpage_set_timestamp:
 30 push %ebp
 31 mov %esp,%ebp
 32
 33 mov _commPagePtr32,%ecx /* %ecx = kernel address of 32-bit commpage */
 34 sub $ _COMM_PAGE32_BASE_ADDRESS,%ecx /* bias %ecx so (user offset + %ecx) = kernel address */
 35 mov _commPagePtr64,%edx /* point to 64-bit commpage too */
 36 mov %edx,%eax /* keep raw pointer for the NULL test below */
 37 sub $ _COMM_PAGE32_START_ADDRESS,%edx /* because kernel is built 32-bit */
 38 test %eax,%eax
 39 cmovz %ecx,%edx /* if no 64-bit commpage, point to 32 with both */
 40
 41 movl $0,_COMM_PAGE_TIMEENABLE(%ecx) /* clear valid flag while fields are in flux */
 42 movl $0,_COMM_PAGE_TIMEENABLE(%edx)
 43
 44 mov 8(%ebp),%eax /* is the full 64-bit abstime zero? */
 45 or 12(%ebp),%eax
 46 je 1f /* yes - leave the timestamp disabled */
 47
 48 mov 8(%ebp),%eax /* store abstime lo/hi into both pages */
 49 mov %eax,_COMM_PAGE_TIMEBASE(%ecx)
 50 mov %eax,_COMM_PAGE_TIMEBASE(%edx)
 51 mov 12(%ebp),%eax
 52 mov %eax,_COMM_PAGE_TIMEBASE+4(%ecx)
 53 mov %eax,_COMM_PAGE_TIMEBASE+4(%edx)
 54
 55 mov 16(%ebp),%eax /* store secs lo/hi into both pages */
 56 mov %eax,_COMM_PAGE_TIMESTAMP(%ecx)
 57 mov %eax,_COMM_PAGE_TIMESTAMP(%edx)
 58 mov 20(%ebp),%eax
 59 mov %eax,_COMM_PAGE_TIMESTAMP+4(%ecx)
 60 mov %eax,_COMM_PAGE_TIMESTAMP+4(%edx)
 61
 62 movl $1,_COMM_PAGE_TIMEENABLE(%ecx) /* snapshot complete: set valid flag last */
 63 movl $1,_COMM_PAGE_TIMEENABLE(%edx)
 641:
 65 pop %ebp
 66 ret
67
 68 .text
 69 .align 2, 0x90
 70 .globl _commpage_set_nanotime
/*
 * Publish new nanotime conversion parameters (TSC base, ns base, scale,
 * shift) into BOTH commpages (32-bit and, when present, 64-bit).
 * Returns without touching anything if the 32-bit commpage has not been
 * set up yet.
 *
 * i386 cdecl stack frame after the prologue (64-bit args split lo/hi):
 *    8(%ebp)/12(%ebp) = tsc_base lo/hi
 *   16(%ebp)/20(%ebp) = ns_base  lo/hi
 *   24(%ebp)          = scale
 *   28(%ebp)          = shift
 * Clobbers: %eax, %ecx, %edx, flags.
 *
 * NOTE(review): ns_base is written last, after scale and shift —
 * presumably user-mode readers key on it for consistency; confirm the
 * reader protocol before reordering these stores.
 */
 71/* extern void commpage_set_nanotime(uint64_t tsc_base, uint64_t ns_base, uint32_t scale, uint32_t shift); */
 72_commpage_set_nanotime:
 73 push %ebp
 74 mov %esp,%ebp
 75
 76 mov _commPagePtr32,%ecx /* %ecx = kernel address of 32-bit commpage */
 77 testl %ecx,%ecx /* commpage initialized yet? */
 78 je 1f /* no - nothing to update */
 79
 80 sub $(_COMM_PAGE_BASE_ADDRESS),%ecx /* bias %ecx so (user offset + %ecx) = kernel address */
 81 mov _commPagePtr64,%edx /* point to 64-bit commpage too */
 82 mov %edx,%eax /* keep raw pointer for the NULL test below */
 83 sub $ _COMM_PAGE32_START_ADDRESS,%edx /* because kernel is built 32-bit */
 84 test %eax,%eax
 85 cmovz %ecx,%edx /* if no 64-bit commpage, point to 32 with both */
 86
 87 mov 8(%ebp),%eax /* tsc_base lo/hi into both pages */
 88 mov %eax,_COMM_PAGE_NT_TSC_BASE(%ecx)
 89 mov %eax,_COMM_PAGE_NT_TSC_BASE(%edx)
 90 mov 12(%ebp),%eax
 91 mov %eax,_COMM_PAGE_NT_TSC_BASE+4(%ecx)
 92 mov %eax,_COMM_PAGE_NT_TSC_BASE+4(%edx)
 93
 94 mov 24(%ebp),%eax /* scale */
 95 mov %eax,_COMM_PAGE_NT_SCALE(%ecx)
 96 mov %eax,_COMM_PAGE_NT_SCALE(%edx)
 97
 98 mov 28(%ebp),%eax /* shift */
 99 mov %eax,_COMM_PAGE_NT_SHIFT(%ecx)
 100 mov %eax,_COMM_PAGE_NT_SHIFT(%edx)
 101
 102 mov 16(%ebp),%eax /* ns_base lo/hi, written last */
 103 mov %eax,_COMM_PAGE_NT_NS_BASE(%ecx)
 104 mov %eax,_COMM_PAGE_NT_NS_BASE(%edx)
 105 mov 20(%ebp),%eax
 106 mov %eax,_COMM_PAGE_NT_NS_BASE+4(%ecx)
 107 mov %eax,_COMM_PAGE_NT_NS_BASE+4(%edx)
 1081:
 109 pop %ebp
 110 ret
111
/* CPN(x) expands to the C-visible name of a commpage routine descriptor */
112#define CPN(routine) _commpage_ ## routine
113
114/* pointers to the 32-bit commpage routine descriptors */
115/* WARNING: these must be sorted by commpage address! */
/* The table is NULL-terminated (trailing .long 0). */
 116 .const_data
 117 .align 2
 118 .globl _commpage_32_routines
 119_commpage_32_routines:
 120 .long CPN(compare_and_swap32_mp)
 121 .long CPN(compare_and_swap32_up)
 122 .long CPN(compare_and_swap64_mp)
 123 .long CPN(compare_and_swap64_up)
 124 .long CPN(atomic_add32_mp)
 125 .long CPN(atomic_add32_up)
 126 .long CPN(mach_absolute_time)
 127 .long CPN(spin_lock_try_mp)
 128 .long CPN(spin_lock_try_up)
 129 .long CPN(spin_lock_mp)
 130 .long CPN(spin_lock_up)
 131 .long CPN(spin_unlock)
 132 .long CPN(pthread_getspecific)
 133 .long CPN(gettimeofday)
 134 .long CPN(sys_flush_dcache)
 135 .long CPN(sys_icache_invalidate)
 136 .long CPN(pthread_self)
 137// .long CPN(relinquish)
 138 .long CPN(bit_test_and_set_mp)
 139 .long CPN(bit_test_and_set_up)
 140 .long CPN(bit_test_and_clear_mp)
 141 .long CPN(bit_test_and_clear_up)
 142 .long CPN(bzero_scalar)
 143 .long CPN(bzero_sse3)
 144 .long CPN(bcopy_scalar)
 145 .long CPN(bcopy_sse3)
 146 .long CPN(bcopy_sse4)
 147 .long CPN(old_nanotime)
 148 .long CPN(memset_pattern_sse3)
 149 .long CPN(longcopy_sse4)
 150 .long CPN(nanotime)
 151 .long 0
152
153
154/* pointers to the 64-bit commpage routine descriptors */
155/* WARNING: these must be sorted by commpage address! */
/* The table is NULL-terminated (trailing .long 0). */
 156 .const_data
 157 .align 2
 158 .globl _commpage_64_routines
 159_commpage_64_routines:
 160 .long CPN(compare_and_swap32_mp_64)
 161 .long CPN(compare_and_swap32_up_64)
 162 .long CPN(compare_and_swap64_mp_64)
 163 .long CPN(compare_and_swap64_up_64)
 164 .long CPN(atomic_add32_mp_64)
 165 .long CPN(atomic_add32_up_64)
 166 .long CPN(atomic_add64_mp_64)
 167 .long CPN(atomic_add64_up_64)
 168 .long CPN(mach_absolute_time)
 169 .long CPN(spin_lock_try_mp_64)
 170 .long CPN(spin_lock_try_up_64)
 171 .long CPN(spin_lock_mp_64)
 172 .long CPN(spin_lock_up_64)
 173 .long CPN(spin_unlock_64)
 174 .long CPN(pthread_getspecific_64)
 175 .long CPN(gettimeofday_64)
 176 .long CPN(sys_flush_dcache_64)
 177 .long CPN(sys_icache_invalidate) /* same routine as 32-bit version, just a "ret" */
 178 .long CPN(pthread_self_64)
 179 .long CPN(bit_test_and_set_mp_64)
 180 .long CPN(bit_test_and_set_up_64)
 181 .long CPN(bit_test_and_clear_mp_64)
 182 .long CPN(bit_test_and_clear_up_64)
 183 .long CPN(bzero_sse3_64)
 184 .long CPN(bcopy_sse4_64)
 185 .long CPN(old_nanotime_64)
 186 .long CPN(memset_pattern_sse3_64)
 187 .long CPN(longcopy_sse4_64)
 188 .long CPN(nanotime_64)
 189 .long 0
190