X-Git-Url: https://git.saurik.com/apple/xnu.git/blobdiff_plain/2d21ac55c334faf3a56e5634905ed6987fc787d4..b0d623f7f2ae71ed96e60569f61f9a9a27016e80:/osfmk/i386/commpage/commpage_asm.s

diff --git a/osfmk/i386/commpage/commpage_asm.s b/osfmk/i386/commpage/commpage_asm.s
index e9604430c..4e3ad82e2 100644
--- a/osfmk/i386/commpage/commpage_asm.s
+++ b/osfmk/i386/commpage/commpage_asm.s
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003-2007 Apple Inc. All rights reserved.
+ * Copyright (c) 2003-2009 Apple Inc. All rights reserved.
  *
  * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
  *
@@ -27,120 +27,177 @@
  */
 
 #include 
-
+#include 
+#include 
+#include 
 /*
  * extern void	commpage_sched_gen_inc(void);
  */
 	.text
-	.align	2, 0x90
-	.globl	_commpage_sched_gen_inc
+	.globl	_commpage_sched_gen_inc
 
 _commpage_sched_gen_inc:
-	push	%ebp
-	mov	%esp,%ebp
+#if defined (__x86_64__)
+	FRAME
+
+	/* Increment 32-bit commpage field if present */
+	movq	_commPagePtr32(%rip),%rdx
+	testq	%rdx,%rdx
+	je	1f
+	subq	$(ASM_COMM_PAGE32_BASE_ADDRESS),%rdx
+	lock
+	incl	ASM_COMM_PAGE_SCHED_GEN(%rdx)
+	/* Increment 64-bit commpage field if present */
+	movq	_commPagePtr64(%rip),%rdx
+	testq	%rdx,%rdx
+	je	1f
+	subq	$(ASM_COMM_PAGE32_START_ADDRESS),%rdx
+	lock
+	incl	ASM_COMM_PAGE_SCHED_GEN(%rdx)
+1:
+	EMARF
+	ret
+#elif defined (__i386__)
+	FRAME
+
 	/* Increment 32-bit commpage field if present */
 	mov	_commPagePtr32,%edx
 	testl	%edx,%edx
 	je	1f
-	sub	$(_COMM_PAGE32_BASE_ADDRESS),%edx
+	sub	$(ASM_COMM_PAGE32_BASE_ADDRESS),%edx
 	lock
-	incl	_COMM_PAGE_SCHED_GEN(%edx)
+	incl	ASM_COMM_PAGE_SCHED_GEN(%edx)
 
 	/* Increment 64-bit commpage field if present */
 	mov	_commPagePtr64,%edx
 	testl	%edx,%edx
 	je	1f
-	sub	$(_COMM_PAGE32_START_ADDRESS),%edx
+	sub	$(ASM_COMM_PAGE32_START_ADDRESS),%edx
 	lock
-	incl	_COMM_PAGE_SCHED_GEN(%edx)
+	incl	ASM_COMM_PAGE_SCHED_GEN(%edx)
 1:
-	pop	%ebp
+	EMARF
 	ret
-
-#define CPN(routine)	_commpage_ ## routine
+#else
+#error unsupported architecture
+#endif
 
 /* pointers to the 32-bit commpage routine descriptors */
 /* WARNING: these must be sorted by commpage address! */
 
 	.const_data
-	.align	2
+	.align	3
 	.globl	_commpage_32_routines
 _commpage_32_routines:
-	.long	CPN(compare_and_swap32_mp)
-	.long	CPN(compare_and_swap32_up)
-	.long	CPN(compare_and_swap64_mp)
-	.long	CPN(compare_and_swap64_up)
-	.long	CPN(AtomicEnqueue)
-	.long	CPN(AtomicDequeue)
-	.long	CPN(memory_barrier)
-	.long	CPN(memory_barrier_sse2)
-	.long	CPN(atomic_add32_mp)
-	.long	CPN(atomic_add32_up)
-	.long	CPN(mach_absolute_time)
-	.long	CPN(spin_lock_try_mp)
-	.long	CPN(spin_lock_try_up)
-	.long	CPN(spin_lock_mp)
-	.long	CPN(spin_lock_up)
-	.long	CPN(spin_unlock)
-	.long	CPN(pthread_getspecific)
-	.long	CPN(gettimeofday)
-	.long	CPN(sys_flush_dcache)
-	.long	CPN(sys_icache_invalidate)
-	.long	CPN(pthread_self)
-//	.long	CPN(relinquish)
-	.long	CPN(bit_test_and_set_mp)
-	.long	CPN(bit_test_and_set_up)
-	.long	CPN(bit_test_and_clear_mp)
-	.long	CPN(bit_test_and_clear_up)
-	.long	CPN(bzero_scalar)
-	.long	CPN(bzero_sse2)
-	.long	CPN(bcopy_scalar)
-	.long	CPN(bcopy_sse2)
-	.long	CPN(bcopy_sse3x)
-	.long	CPN(memset_pattern_sse2)
-	.long	CPN(longcopy_sse3x)
-	.long	CPN(nanotime)
-	.long	CPN(nanotime_slow)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap32_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap32_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap64_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap64_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicEnqueue)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicDequeue)
+	COMMPAGE_DESCRIPTOR_REFERENCE(memory_barrier)
+	COMMPAGE_DESCRIPTOR_REFERENCE(memory_barrier_sse2)
+	COMMPAGE_DESCRIPTOR_REFERENCE(atomic_add32_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(atomic_add32_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(cpu_number)
+	COMMPAGE_DESCRIPTOR_REFERENCE(mach_absolute_time)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_try_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_try_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_unlock)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pthread_getspecific)
+	COMMPAGE_DESCRIPTOR_REFERENCE(gettimeofday)
+	COMMPAGE_DESCRIPTOR_REFERENCE(sys_flush_dcache)
+	COMMPAGE_DESCRIPTOR_REFERENCE(sys_icache_invalidate)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pthread_self)
+	COMMPAGE_DESCRIPTOR_REFERENCE(preempt)
+//	COMMPAGE_DESCRIPTOR_REFERENCE(relinquish)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_set_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_set_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_clear_mp)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_clear_up)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bzero_scalar)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bzero_sse2)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bzero_sse42)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bcopy_scalar)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bcopy_sse2)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bcopy_sse3x)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bcopy_sse42)
+	COMMPAGE_DESCRIPTOR_REFERENCE(memset_pattern_sse2)
+	COMMPAGE_DESCRIPTOR_REFERENCE(longcopy_sse3x)
+	COMMPAGE_DESCRIPTOR_REFERENCE(backoff)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicFifoEnqueue)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicFifoDequeue)
+	COMMPAGE_DESCRIPTOR_REFERENCE(nanotime)
+	COMMPAGE_DESCRIPTOR_REFERENCE(nanotime_slow)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pthread_mutex_lock)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pfz_enqueue)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pfz_dequeue)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pfz_mutex_lock)
+#if defined (__i386__)
 	.long	0
+#elif defined (__x86_64__)
+	.quad	0
+#else
+#error unsupported architecture
+#endif
 
 /* pointers to the 64-bit commpage routine descriptors */
 /* WARNING: these must be sorted by commpage address! */
 
 	.const_data
-	.align	2
+	.align	3
 	.globl	_commpage_64_routines
 _commpage_64_routines:
-	.long	CPN(compare_and_swap32_mp_64)
-	.long	CPN(compare_and_swap32_up_64)
-	.long	CPN(compare_and_swap64_mp_64)
-	.long	CPN(compare_and_swap64_up_64)
-	.long	CPN(AtomicEnqueue_64)
-	.long	CPN(AtomicDequeue_64)
-	.long	CPN(memory_barrier_sse2)	/* same routine as 32-bit version */
-	.long	CPN(atomic_add32_mp_64)
-	.long	CPN(atomic_add32_up_64)
-	.long	CPN(atomic_add64_mp_64)
-	.long	CPN(atomic_add64_up_64)
-	.long	CPN(mach_absolute_time)
-	.long	CPN(spin_lock_try_mp_64)
-	.long	CPN(spin_lock_try_up_64)
-	.long	CPN(spin_lock_mp_64)
-	.long	CPN(spin_lock_up_64)
-	.long	CPN(spin_unlock_64)
-	.long	CPN(pthread_getspecific_64)
-	.long	CPN(gettimeofday_64)
-	.long	CPN(sys_flush_dcache_64)
-	.long	CPN(sys_icache_invalidate)	/* same routine as 32-bit version, just a "ret" */
-	.long	CPN(pthread_self_64)
-	.long	CPN(bit_test_and_set_mp_64)
-	.long	CPN(bit_test_and_set_up_64)
-	.long	CPN(bit_test_and_clear_mp_64)
-	.long	CPN(bit_test_and_clear_up_64)
-	.long	CPN(bzero_sse2_64)
-	.long	CPN(bcopy_sse3x_64)
-	.long	CPN(memset_pattern_sse2_64)
-	.long	CPN(longcopy_sse3x_64)
-	.long	CPN(nanotime_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap32_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap32_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap64_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(compare_and_swap64_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicEnqueue_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicDequeue_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(memory_barrier_sse2)	/* same routine as 32-bit version */
+	COMMPAGE_DESCRIPTOR_REFERENCE(atomic_add32_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(atomic_add32_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(atomic_add64_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(atomic_add64_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(cpu_number_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(mach_absolute_time)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_try_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_try_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_lock_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(spin_unlock_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pthread_getspecific_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(gettimeofday_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(sys_flush_dcache_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(sys_icache_invalidate)	/* same routine as 32-bit version, just a "ret" */
+	COMMPAGE_DESCRIPTOR_REFERENCE(pthread_self_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(preempt_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_set_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_set_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_clear_mp_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bit_test_and_clear_up_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bzero_sse2_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bzero_sse42_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bcopy_sse3x_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(bcopy_sse42_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(memset_pattern_sse2_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(longcopy_sse3x_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(backoff_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicFifoEnqueue_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(AtomicFifoDequeue_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(nanotime_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pthread_mutex_lock_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pfz_enqueue_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pfz_dequeue_64)
+	COMMPAGE_DESCRIPTOR_REFERENCE(pfz_mutex_lock_64)
+#if defined (__i386__)
 	.long	0
+#elif defined (__x86_64__)
+	.quad	0
+#else
+#error unsupported architecture
+#endif
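
Note: the new code relies on three macros defined outside this file: FRAME and EMARF (function prologue/epilogue helpers from xnu's i386 asm headers) and COMMPAGE_DESCRIPTOR_REFERENCE, which emits one pointer-sized table entry per commpage routine descriptor. Their real definitions are not part of this diff; the sketch below is only an illustration of the shape such macros could take (the expansions shown are assumptions, not the actual xnu definitions), included to make the switch from ".long CPN(...)" entries with ".align 2" to pointer-width entries with ".align 3" easier to follow.

/*
 * Illustrative sketch only -- NOT the real definitions from the xnu headers.
 * COMMPAGE_DESCRIPTOR_REFERENCE emits one pointer-sized table entry, which is
 * why the tables move to ".align 3" and gain a ".quad 0" terminator on x86_64.
 * FRAME/EMARF open and close a conventional stack frame ("EMARF" is "FRAME"
 * reversed), replacing the hand-written push/pop of %ebp in the old code.
 */
#if defined (__x86_64__)
#define	COMMPAGE_DESCRIPTOR_REFERENCE(name)	.quad	_commpage_ ## name	/* assumed expansion */
#define	FRAME	pushq %rbp ; movq %rsp,%rbp					/* assumed expansion */
#define	EMARF	popq %rbp							/* assumed expansion */
#elif defined (__i386__)
#define	COMMPAGE_DESCRIPTOR_REFERENCE(name)	.long	_commpage_ ## name	/* assumed expansion */
#define	FRAME	pushl %ebp ; movl %esp,%ebp					/* assumed expansion */
#define	EMARF	popl %ebp							/* assumed expansion */
#endif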