/*
- * Copyright (c) 2003-2007 Apple Inc. All rights reserved.
+ * Copyright (c) 2003-2009 Apple Inc. All rights reserved.
*
* @APPLE_OSREFERENCE_LICENSE_HEADER_START@
*
*/
#include <machine/cpu_capabilities.h>
-
+#include <machine/commpage.h>
+#include <machine/asm.h>
+#include <assym.s>
/*
* extern void commpage_sched_gen_inc(void);
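+ *
+ * Bumps the scheduler-generation counter in each live commpage; user-mode
+ * code (e.g. spin-lock backoff) can poll this field to notice that the
+ * scheduler has run.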
*/
.text
- .align 2, 0x90
- .globl _commpage_sched_gen_inc
+ .globl _commpage_sched_gen_inc
_commpage_sched_gen_inc:
- push %ebp
- mov %esp,%ebp
+#if defined (__x86_64__)
+ FRAME
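+ /* FRAME/EMARF are the asm.h frame-pointer prologue/epilogue macros, */
+ /* standing in for the explicit push/mov/pop of the old i386-only body. */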
+
+ /* Increment 32-bit commpage field if present */
+ movq _commPagePtr32(%rip),%rdx
+ testq %rdx,%rdx
+ je 1f
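+ /* ASM_COMM_PAGE_SCHED_GEN is a user virtual address, so bias the kernel */
+ /* pointer by the user base to make that address usable as a displacement */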
+ subq $(ASM_COMM_PAGE32_BASE_ADDRESS),%rdx
+ lock
+ incl ASM_COMM_PAGE_SCHED_GEN(%rdx)
+ /* Increment 64-bit commpage field if present */
+ movq _commPagePtr64(%rip),%rdx
+ testq %rdx,%rdx
+ je 1f
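+ /* The 64-bit commpage is addressed relative to the 32-bit placement, */
+ /* hence the 32-bit START address as the bias here */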
+ subq $(ASM_COMM_PAGE32_START_ADDRESS),%rdx
+ lock
+ incl ASM_COMM_PAGE_SCHED_GEN(%rdx)
+1:
+ EMARF
+ ret
+#elif defined (__i386__)
+ FRAME
+
/* Increment 32-bit commpage field if present */
mov _commPagePtr32,%edx
testl %edx,%edx
je 1f
- sub $(_COMM_PAGE32_BASE_ADDRESS),%edx
+ sub $(ASM_COMM_PAGE32_BASE_ADDRESS),%edx
lock
- incl _COMM_PAGE_SCHED_GEN(%edx)
+ incl ASM_COMM_PAGE_SCHED_GEN(%edx)
/* Increment 64-bit commpage field if present */
mov _commPagePtr64,%edx
testl %edx,%edx
je 1f
- sub $(_COMM_PAGE32_START_ADDRESS),%edx
+ sub $(ASM_COMM_PAGE32_START_ADDRESS),%edx
lock
- incl _COMM_PAGE_SCHED_GEN(%edx)
+ incl ASM_COMM_PAGE_SCHED_GEN(%edx)
1:
- pop %ebp
+ EMARF
ret
-
-#define CPN(routine) _commpage_ ## routine
+#else
+#error unsupported architecture
+#endif
/* pointers to the 32-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
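+ /* (the populate code lays routines down in ascending address order and */
+ /* checks for overlap, so an out-of-order entry will panic at boot) */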
.const_data
- .align 2
+ .align 3
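+ /* .align 3 == 8-byte alignment, required now that entries may be .quad */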
.globl _commpage_32_routines
_commpage_32_routines:
- .long CPN(compare_and_swap32_mp)
- .long CPN(compare_and_swap32_up)
- .long CPN(compare_and_swap64_mp)
- .long CPN(compare_and_swap64_up)
- .long CPN(AtomicEnqueue)
- .long CPN(AtomicDequeue)
- .long CPN(memory_barrier)
- .long CPN(memory_barrier_sse2)
- .long CPN(atomic_add32_mp)
- .long CPN(atomic_add32_up)
- .long CPN(mach_absolute_time)
- .long CPN(spin_lock_try_mp)
- .long CPN(spin_lock_try_up)
- .long CPN(spin_lock_mp)
- .long CPN(spin_lock_up)
- .long CPN(spin_unlock)
- .long CPN(pthread_getspecific)
- .long CPN(gettimeofday)
- .long CPN(sys_flush_dcache)
- .long CPN(sys_icache_invalidate)
- .long CPN(pthread_self)
-// .long CPN(relinquish)
- .long CPN(bit_test_and_set_mp)
- .long CPN(bit_test_and_set_up)
- .long CPN(bit_test_and_clear_mp)
- .long CPN(bit_test_and_clear_up)
- .long CPN(bzero_scalar)
- .long CPN(bzero_sse2)
- .long CPN(bzero_sse42)
- .long CPN(bcopy_scalar)
- .long CPN(bcopy_sse2)
- .long CPN(bcopy_sse3x)
- .long CPN(bcopy_sse42)
- .long CPN(memset_pattern_sse2)
- .long CPN(longcopy_sse3x)
- .long CPN(nanotime)
- .long CPN(nanotime_slow)
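+ /* COMMPAGE_DESCRIPTOR_REFERENCE(name) emits a pointer-sized reference */
+ /* (.long on i386, .quad on x86_64) to the routine's commpage descriptor; */
+ /* see machine/commpage.h for the expansion */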
+ COMMPAGE_DESCRIPTOR_REFERENCE(preempt)
+ COMMPAGE_DESCRIPTOR_REFERENCE(backoff)
+ COMMPAGE_DESCRIPTOR_REFERENCE(pfz_enqueue)
+ COMMPAGE_DESCRIPTOR_REFERENCE(pfz_dequeue)
+ COMMPAGE_DESCRIPTOR_REFERENCE(pfz_mutex_lock)
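+ /* NULL-terminate the list: */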
+#if defined (__i386__)
.long 0
+#elif defined (__x86_64__)
+ .quad 0
+#else
+#error unsupported architecture
+#endif
/* pointers to the 64-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
.const_data
- .align 2
+ .align 3
.globl _commpage_64_routines
_commpage_64_routines:
- .long CPN(compare_and_swap32_mp_64)
- .long CPN(compare_and_swap32_up_64)
- .long CPN(compare_and_swap64_mp_64)
- .long CPN(compare_and_swap64_up_64)
- .long CPN(AtomicEnqueue_64)
- .long CPN(AtomicDequeue_64)
- .long CPN(memory_barrier_sse2) /* same routine as 32-bit version */
- .long CPN(atomic_add32_mp_64)
- .long CPN(atomic_add32_up_64)
- .long CPN(atomic_add64_mp_64)
- .long CPN(atomic_add64_up_64)
- .long CPN(mach_absolute_time)
- .long CPN(spin_lock_try_mp_64)
- .long CPN(spin_lock_try_up_64)
- .long CPN(spin_lock_mp_64)
- .long CPN(spin_lock_up_64)
- .long CPN(spin_unlock_64)
- .long CPN(pthread_getspecific_64)
- .long CPN(gettimeofday_64)
- .long CPN(sys_flush_dcache_64)
- .long CPN(sys_icache_invalidate) /* same routine as 32-bit version, just a "ret" */
- .long CPN(pthread_self_64)
- .long CPN(bit_test_and_set_mp_64)
- .long CPN(bit_test_and_set_up_64)
- .long CPN(bit_test_and_clear_mp_64)
- .long CPN(bit_test_and_clear_up_64)
- .long CPN(bzero_sse2_64)
- .long CPN(bzero_sse42_64)
- .long CPN(bcopy_sse3x_64)
- .long CPN(bcopy_sse42_64)
- .long CPN(memset_pattern_sse2_64)
- .long CPN(longcopy_sse3x_64)
- .long CPN(nanotime_64)
+ COMMPAGE_DESCRIPTOR_REFERENCE(preempt_64)
+ COMMPAGE_DESCRIPTOR_REFERENCE(backoff_64)
+ COMMPAGE_DESCRIPTOR_REFERENCE(pfz_enqueue_64)
+ COMMPAGE_DESCRIPTOR_REFERENCE(pfz_dequeue_64)
+ COMMPAGE_DESCRIPTOR_REFERENCE(pfz_mutex_lock_64)
+#if defined (__i386__)
.long 0
+#elif defined (__x86_64__)
+ .quad 0
+#else
+#error unsupported architecture
+#endif