+/* TH_RUN & !TH_IDLE controls whether a thread has a run count */
+/*
+ * NOTE: unlike the share/background macros below, these expand to a plain
+ * expression (no MACRO_BEGIN/MACRO_END), so callers may use the returned
+ * count. The 'th' argument is currently unused.
+ */
+#define sched_run_incr(th) \
+ hw_atomic_add(&sched_run_count, 1)
+
+#define sched_run_decr(th) \
+ hw_atomic_sub(&sched_run_count, 1)
+
+#if MACH_ASSERT
+/*
+ * Debug (MACH_ASSERT) builds: the count maintenance is performed by real
+ * out-of-line functions, defined elsewhere, so that the per-thread count
+ * invariants can be asserted on every increment/decrement.
+ */
+extern void sched_share_incr(thread_t thread);
+extern void sched_share_decr(thread_t thread);
+extern void sched_background_incr(thread_t thread);
+extern void sched_background_decr(thread_t thread);
+
+/* Validates the thread's share/background count state; see definition */
+extern void assert_thread_sched_count(thread_t thread);
+
+#else /* MACH_ASSERT */
+/*
+ * Release builds: the counts are maintained inline with no invariant
+ * checking. The 'th' argument is accepted (to match the MACH_ASSERT
+ * function signatures) but unused here.
+ */
+/* sched_mode == TH_MODE_TIMESHARE controls whether a thread has a timeshare count when it has a run count */
+#define sched_share_incr(th) \
+MACRO_BEGIN \
+ (void)hw_atomic_add(&sched_share_count, 1); \
+MACRO_END
+
+#define sched_share_decr(th) \
+MACRO_BEGIN \
+ (void)hw_atomic_sub(&sched_share_count, 1); \
+MACRO_END
+
+/* TH_SFLAG_THROTTLED controls whether a thread has a background count when it has a run count and a share count */
+#define sched_background_incr(th) \
+MACRO_BEGIN \
+ (void)hw_atomic_add(&sched_background_count, 1); \
+MACRO_END
+
+#define sched_background_decr(th) \
+MACRO_BEGIN \
+ (void)hw_atomic_sub(&sched_background_count, 1); \
+MACRO_END
+
+/* Count invariants are only checked on MACH_ASSERT builds; no-op here */
+#define assert_thread_sched_count(th) \
+MACRO_BEGIN \
+MACRO_END
+
+#endif /* !MACH_ASSERT */