+/*
+ * Returns true if the rwlock still carries the static-initializer
+ * signature, i.e. it was created with PTHREAD_RWLOCK_INITIALIZER and
+ * has not yet been fully initialized.
+ */
+PTHREAD_ALWAYS_INLINE
+static inline bool
+_pthread_rwlock_check_signature_init(_pthread_rwlock *rwlock)
+{
+	return (rwlock->sig == _PTHREAD_RWLOCK_SIG_init);
+}
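+/*
+ * Illustrative sketch (an assumption about usage, not part of this
+ * change): lock entry points can use the check above to lazily
+ * initialize a statically initialized rwlock before taking the fast
+ * path. The helper name below is hypothetical:
+ *
+ *	if (_pthread_rwlock_check_signature_init(rwlock)) {
+ *		_pthread_rwlock_lazy_init(rwlock);	// hypothetical init path
+ *	}
+ */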
+
+/* Always called with the list lock held; returns with the list lock held. */
+PTHREAD_ALWAYS_INLINE
+static inline bool
+_pthread_is_valid_locked(pthread_t thread)
+{
+	pthread_t p;
+loop:
+	TAILQ_FOREACH(p, &__pthread_head, plist) {
+		if (p == thread) {
+			int state = os_atomic_load(&p->cancel_state, relaxed);
+			if (state & _PTHREAD_CANCEL_INITIALIZED) {
+				return true;
+			}
+			/*
+			 * The thread is on the list but not fully initialized
+			 * yet: drop the list lock, briefly yield to it (with a
+			 * priority depression) so it can finish initializing,
+			 * then retake the lock and rescan from the top, since
+			 * the list may have changed while it was unlocked.
+			 */
+			_PTHREAD_UNLOCK(_pthread_list_lock);
+			thread_switch(_pthread_kernel_thread(p),
+					SWITCH_OPTION_OSLOCK_DEPRESS, 1);
+			_PTHREAD_LOCK(_pthread_list_lock);
+			goto loop;
+		}
+	}
+
+	return false;
+}
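+/*
+ * Minimal usage sketch (mirrors the caller in _pthread_is_valid below):
+ * the list lock must be held across the call, since the helper may drop
+ * and retake it internally:
+ *
+ *	_PTHREAD_LOCK(_pthread_list_lock);
+ *	bool on_list = _pthread_is_valid_locked(thread);
+ *	_PTHREAD_UNLOCK(_pthread_list_lock);
+ */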
+
+/* Pass to _pthread_is_valid() to also acquire thread->lock on success. */
+#define PTHREAD_IS_VALID_LOCK_THREAD 0x1
+
+PTHREAD_ALWAYS_INLINE
+static inline bool
+_pthread_is_valid(pthread_t thread, int flags, mach_port_t *portp)
+{
+	mach_port_t kport = MACH_PORT_NULL;
+	bool valid;
+
+	if (thread == NULL) {
+		return false;
+	}
+
+	if (thread == pthread_self()) {
+		/* The calling thread is trivially valid. */
+		valid = true;
+		kport = _pthread_kernel_thread(thread);
+		if (flags & PTHREAD_IS_VALID_LOCK_THREAD) {
+			_PTHREAD_LOCK(thread->lock);
+		}
+	} else {
+		_PTHREAD_LOCK(_pthread_list_lock);
+		if (_pthread_is_valid_locked(thread)) {
+			kport = _pthread_kernel_thread(thread);
+			valid = true;
+			/*
+			 * Take thread->lock while still holding the list
+			 * lock, so the thread cannot exit in between.
+			 */
+			if (flags & PTHREAD_IS_VALID_LOCK_THREAD) {
+				_PTHREAD_LOCK(thread->lock);
+			}
+		} else {
+			valid = false;
+		}
+		_PTHREAD_UNLOCK(_pthread_list_lock);
+	}
+
+	if (portp != NULL) {
+		*portp = kport;
+	}
+	return valid;
+}
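+/*
+ * Illustrative caller (an assumption about usage, not from this change):
+ * when PTHREAD_IS_VALID_LOCK_THREAD is passed and the call succeeds,
+ * thread->lock is held on return and the caller must release it:
+ *
+ *	mach_port_t kport;
+ *	if (_pthread_is_valid(thread, PTHREAD_IS_VALID_LOCK_THREAD, &kport)) {
+ *		// ... operate on the thread and its kernel port ...
+ *		_PTHREAD_UNLOCK(thread->lock);
+ *	}
+ */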
+
+PTHREAD_ALWAYS_INLINE
+static inline void*
+_pthread_atomic_xchg_ptr_inline(void **p, void *v)
+{
+	return os_atomic_xchg(p, v, seq_cst);
+}
+
+PTHREAD_ALWAYS_INLINE
+static inline uint32_t
+_pthread_atomic_xchg_uint32_relaxed_inline(uint32_t *p, uint32_t v)
+{
+	return os_atomic_xchg(p, v, relaxed);
+}
+
+#define _pthread_atomic_xchg_ptr(p, v) \
+		_pthread_atomic_xchg_ptr_inline(p, v)
+#define _pthread_atomic_xchg_uint32_relaxed(p, v) \
+		_pthread_atomic_xchg_uint32_relaxed_inline(p, v)
+
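+/*
+ * Illustrative use (variable names are hypothetical): the _ptr exchange
+ * is seq_cst, so it can publish a pointer and observe the previous owner
+ * in one step; the _uint32 variant is relaxed and provides no ordering,
+ * so it is only suitable for standalone words:
+ *
+ *	void *prev = _pthread_atomic_xchg_ptr((void **)&slot, new_value);
+ *	uint32_t oldsig = _pthread_atomic_xchg_uint32_relaxed(&sig_word, 0);
+ */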