/*
 * Copyright (c) 2003-2007 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <machine/cpu_capabilities.h>


/*
 * extern void commpage_sched_gen_inc(void);
 */
	.text
	.align	2, 0x90
	.globl	_commpage_sched_gen_inc

_commpage_sched_gen_inc:
	push	%ebp
	mov	%esp,%ebp

	/* Increment 32-bit commpage field if present */
	mov	_commPagePtr32,%edx
	testl	%edx,%edx
	je	1f
	sub	$(_COMM_PAGE32_BASE_ADDRESS),%edx
	lock
	incl	_COMM_PAGE_SCHED_GEN(%edx)

	/* Increment 64-bit commpage field if present */
	mov	_commPagePtr64,%edx
	testl	%edx,%edx
	je	1f
	sub	$(_COMM_PAGE32_START_ADDRESS),%edx
	lock
	incl	_COMM_PAGE_SCHED_GEN(%edx)
1:
	pop	%ebp
	ret

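/*
 * For reference, a hedged C sketch of what the routine above does: it bumps
 * the scheduler-generation word in whichever commpages are currently mapped,
 * exiting early (as the asm does) when the 32-bit commpage is absent.  The
 * extern declarations and the OSIncrementAtomic() call (libkern/OSAtomic.h)
 * are illustrative assumptions, not how this file is built; the kernel-side
 * pointers and _COMM_PAGE_* constants come from commpage.c and
 * cpu_capabilities.h.
 *
 *	extern char	*commPagePtr32;		// kernel address of 32-bit commpage, or NULL
 *	extern char	*commPagePtr64;		// kernel address of 64-bit commpage, or NULL
 *
 *	void commpage_sched_gen_inc(void)
 *	{
 *		if (commPagePtr32 == NULL)
 *			return;
 *		OSIncrementAtomic((volatile SInt32 *)(commPagePtr32 +
 *		    _COMM_PAGE_SCHED_GEN - _COMM_PAGE32_BASE_ADDRESS));
 *		if (commPagePtr64 == NULL)
 *			return;
 *		OSIncrementAtomic((volatile SInt32 *)(commPagePtr64 +
 *		    _COMM_PAGE_SCHED_GEN - _COMM_PAGE32_START_ADDRESS));
 *	}
 */
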
#define	CPN(routine)	_commpage_ ## routine

/* pointers to the 32-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
	.const_data
	.align	2
	.globl	_commpage_32_routines
_commpage_32_routines:
	.long	CPN(compare_and_swap32_mp)
	.long	CPN(compare_and_swap32_up)
	.long	CPN(compare_and_swap64_mp)
	.long	CPN(compare_and_swap64_up)
	.long	CPN(AtomicEnqueue)
	.long	CPN(AtomicDequeue)
	.long	CPN(memory_barrier)
	.long	CPN(memory_barrier_sse2)
	.long	CPN(atomic_add32_mp)
	.long	CPN(atomic_add32_up)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp)
	.long	CPN(spin_lock_try_up)
	.long	CPN(spin_lock_mp)
	.long	CPN(spin_lock_up)
	.long	CPN(spin_unlock)
	.long	CPN(pthread_getspecific)
	.long	CPN(gettimeofday)
	.long	CPN(sys_flush_dcache)
	.long	CPN(sys_icache_invalidate)
	.long	CPN(pthread_self)
//	.long	CPN(relinquish)
	.long	CPN(bit_test_and_set_mp)
	.long	CPN(bit_test_and_set_up)
	.long	CPN(bit_test_and_clear_mp)
	.long	CPN(bit_test_and_clear_up)
	.long	CPN(bzero_scalar)
	.long	CPN(bzero_sse2)
	.long	CPN(bzero_sse42)
	.long	CPN(bcopy_scalar)
	.long	CPN(bcopy_sse2)
	.long	CPN(bcopy_sse3x)
	.long	CPN(bcopy_sse42)
	.long	CPN(memset_pattern_sse2)
	.long	CPN(longcopy_sse3x)
	.long	CPN(nanotime)
	.long	CPN(nanotime_slow)
	.long	0

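/*
 * A hedged sketch of how commpage_populate() in commpage.c might walk this
 * null-terminated table.  The commpage_descriptor type and the
 * commpage_stuff_routine() helper are assumed here from commpage.h/commpage.c;
 * only the loop shape is the point:
 *
 *	extern commpage_descriptor *commpage_32_routines[];
 *
 *	commpage_descriptor **rd;
 *
 *	for (rd = commpage_32_routines; *rd != NULL; rd++)
 *		commpage_stuff_routine(*rd);
 */
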
/* pointers to the 64-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
	.const_data
	.align	2
	.globl	_commpage_64_routines
_commpage_64_routines:
	.long	CPN(compare_and_swap32_mp_64)
	.long	CPN(compare_and_swap32_up_64)
	.long	CPN(compare_and_swap64_mp_64)
	.long	CPN(compare_and_swap64_up_64)
	.long	CPN(AtomicEnqueue_64)
	.long	CPN(AtomicDequeue_64)
	.long	CPN(memory_barrier_sse2)	/* same routine as 32-bit version */
	.long	CPN(atomic_add32_mp_64)
	.long	CPN(atomic_add32_up_64)
	.long	CPN(atomic_add64_mp_64)
	.long	CPN(atomic_add64_up_64)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp_64)
	.long	CPN(spin_lock_try_up_64)
	.long	CPN(spin_lock_mp_64)
	.long	CPN(spin_lock_up_64)
	.long	CPN(spin_unlock_64)
	.long	CPN(pthread_getspecific_64)
	.long	CPN(gettimeofday_64)
	.long	CPN(sys_flush_dcache_64)
	.long	CPN(sys_icache_invalidate)	/* same routine as 32-bit version, just a "ret" */
	.long	CPN(pthread_self_64)
	.long	CPN(bit_test_and_set_mp_64)
	.long	CPN(bit_test_and_set_up_64)
	.long	CPN(bit_test_and_clear_mp_64)
	.long	CPN(bit_test_and_clear_up_64)
	.long	CPN(bzero_sse2_64)
	.long	CPN(bzero_sse42_64)
	.long	CPN(bcopy_sse3x_64)
	.long	CPN(bcopy_sse42_64)
	.long	CPN(memset_pattern_sse2_64)
	.long	CPN(longcopy_sse3x_64)
	.long	CPN(nanotime_64)
	.long	0

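/*
 * Note on the many variants above: most commpage slots are listed several
 * times (_mp/_up, _sse2/_sse3x/_sse42, and so on), and the kernel installs
 * only the variant that suits the boot CPU.  A hedged sketch of that
 * selection, assuming each descriptor carries "musthave"/"canthave" masks of
 * _cpu_capabilities bits (see cpu_capabilities.h and commpage.c for the real
 * field names):
 *
 *	if ((_cpu_capabilities & rd->musthave) == rd->musthave &&
 *	    (_cpu_capabilities & rd->canthave) == 0)
 *		... copy this variant into its commpage slot ...
 */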