+/*
+ * _rtc_nanotime_store: record a new (tsc, nsec) base pair, plus the
+ * scale/shift factors used to convert TSC deltas to nanoseconds, into *dst.
+ * NOTE(review): implemented elsewhere (assembly); presumably it brackets
+ * the update with the RNT_GENERATION protocol used by the readers below --
+ * confirm in the implementation.
+ */
+extern void _rtc_nanotime_store(
+ uint64_t tsc,
+ uint64_t nsec,
+ uint32_t scale,
+ uint32_t shift,
+ rtc_nanotime_t *dst);
+
+/*
+ * _rtc_nanotime_read: return current nanotime derived from the TSC and
+ * the info in *rntp.  'slow' selects an alternate algorithm -- presumably
+ * the path for TSCs below SLOW_TSC_THRESHOLD (see below); confirm at the
+ * call sites.
+ */
+extern uint64_t _rtc_nanotime_read(
+ rtc_nanotime_t *rntp,
+ int slow);
+
+/* Global nanotime info record consumed by the read snippets below;
+ * kept consistent via the RNT_GENERATION check/retry protocol. */
+extern rtc_nanotime_t rtc_nanotime_info;
+#endif
+
+/* NOTE(review): value looks like a TSC frequency in Hz (~1.00007 GHz);
+ * TSCs slower than this take the 'slow' nanotime path -- confirm units
+ * against the code that compares against it. */
+#define SLOW_TSC_THRESHOLD 1000067800 /* TSC is too slow for regular nanotime() algorithm */
+
+#if defined(__i386__)
+/*
+ * Assembly snippet included in exception handlers and rtc_nanotime_read()
+ * %edi points to nanotime info struct
+ * %edx:%eax returns nanotime
+ *
+ * Computes: ns_base + ((tsc - tsc_base) * scale) >> 32, retrying if the
+ * info struct's generation changes (or is 0, i.e. mid-update) meanwhile.
+ * Clobbers %ebx, %ecx and %esi in addition to %edx:%eax.
+ */
+#define RTC_NANOTIME_READ_FAST() \
+0: movl RNT_GENERATION(%edi),%esi /* being updated? */ ; \
+ testl %esi,%esi /* generation is 0 while an update is in progress */ ; \
+ jz 0b /* wait until done */ ; \
+ rdtsc /* %edx:%eax := current TSC */ ; \
+ lfence /* order rdtsc wrt surrounding instructions */ ; \
+ subl RNT_TSC_BASE(%edi),%eax ; \
+ sbbl RNT_TSC_BASE+4(%edi),%edx /* tsc - tsc_base */ ; \
+ movl RNT_SCALE(%edi),%ecx /* * scale factor */ ; \
+ movl %edx,%ebx /* save hi 32 bits of delta */ ; \
+ mull %ecx /* %edx:%eax := lo(delta) * scale */ ; \
+ movl %ebx,%eax /* %eax := hi(delta) */ ; \
+ movl %edx,%ebx /* save hi word of the lo partial product */ ; \
+ mull %ecx /* %edx:%eax := hi(delta) * scale */ ; \
+ addl %ebx,%eax /* fold in carry from lo partial product... */ ; \
+ adcl $0,%edx /* ...%edx:%eax = (delta * scale) >> 32 */ ; \
+ addl RNT_NS_BASE(%edi),%eax /* + ns_base */ ; \
+ adcl RNT_NS_BASE+4(%edi),%edx ; \
+ cmpl RNT_GENERATION(%edi),%esi /* check for update */ ; \
+ jne 0b /* do it all again */
+
+#elif defined(__x86_64__)
+
+/*
+ * Assembly snippet included in exception handlers and rtc_nanotime_read()
+ * %rdi points to nanotime info struct.
+ * %rax returns nanotime
+ *
+ * Computes: ns_base + ((tsc - tsc_base) * scale) >> 32, retrying if the
+ * info struct's generation changes (or is 0, i.e. mid-update) meanwhile.
+ * Clobbers %rdx, %rcx and %esi in addition to %rax.
+ */
+#define RTC_NANOTIME_READ_FAST() \
+0: movl RNT_GENERATION(%rdi),%esi /* snapshot generation */ ; \
+ test %esi,%esi /* info updating? */ ; \
+ jz 0b /* - wait if so */ ; \
+ rdtsc /* tsc halves in %edx (hi) and %eax (lo) */ ; \
+ lfence /* order rdtsc wrt surrounding instructions */ ; \
+ shlq $32,%rdx /* position hi half */ ; \
+ orq %rdx,%rax /* %rax := tsc */ ; \
+ subq RNT_TSC_BASE(%rdi),%rax /* tsc - tsc_base */ ; \
+ xorq %rcx,%rcx /* clear %rcx before 32-bit scale load */ ; \
+ movl RNT_SCALE(%rdi),%ecx ; \
+ mulq %rcx /* delta * scale */ ; \
+ shrdq $32,%rdx,%rax /* %rdx:%rax >>= 32 */ ; \
+ addq RNT_NS_BASE(%rdi),%rax /* add ns_base */ ; \
+ cmpl RNT_GENERATION(%rdi),%esi /* repeat if changed */ ; \
+ jne 0b
+
+#endif
+