+/*
+ * call_continuation -- switch to the thread's kernel stack and call a
+ * continuation: %rdi continuation, %rsi parameter, %rdx wait result,
+ * %ecx non-zero to re-enable interrupts before the call
+ */
+Entry(call_continuation)
+
+ movq %rdi, %r12 /* continuation */
+ movq %rsi, %r13 /* continuation param */
+ movq %rdx, %r14 /* wait result */
+
+ movq %gs:CPU_KERNEL_STACK,%rsp /* set the stack */
+ xorq %rbp,%rbp /* zero frame pointer */
+
+ test %ecx, %ecx /* interrupts to be enabled? (4th argument) */
+ jz 1f /* no -- call the continuation directly */
+ mov $1, %edi /* TRUE */
+ call _ml_set_interrupts_enabled /* enable interrupts first */
+1:
+
+ movq %r12,%rcx /* continuation */
+ movq %r13,%rdi /* continuation param */
+ movq %r14,%rsi /* wait result */
+
+ call *%rcx /* call continuation */
+ movq %gs:CPU_ACTIVE_THREAD,%rdi
+ call EXT(thread_terminate)
+
+
+Entry(x86_init_wrapper)
+ xor %rbp, %rbp /* zero frame pointer to terminate backtraces */
+ movq %rsi, %rsp /* switch to the new stack passed in %rsi */
+ callq *%rdi /* call init function in %rdi -- no return path */
+
+#if CONFIG_VMX
+
+/*
+ * __vmxon -- Enter VMX Operation
+ * int __vmxon(addr64_t v);
+ */
+Entry(__vmxon)
+ FRAME
+ push %rdi /* stage v in memory; vmxon takes a 64-bit memory operand */
+
+ mov $(VMX_FAIL_INVALID), %ecx /* preload the three possible results */
+ mov $(VMX_FAIL_VALID), %edx
+ mov $(VMX_SUCCEED), %eax
+ vmxon (%rsp) /* enter VMX root operation */
+ cmovcl %ecx, %eax /* CF = 1, ZF = 0 */
+ cmovzl %edx, %eax /* CF = 0, ZF = 1 */
+
+ pop %rdi
+ EMARF
+ ret
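+
+/*
+ * Usage sketch (illustrative only, not part of this diff): a C caller is
+ * assumed to pass the physical address of its 4KB-aligned VMXON region and
+ * check the returned status; the variable name below is hypothetical:
+ *
+ *	// vmxon_region_paddr: physical address of a 4KB-aligned VMXON region
+ *	if (__vmxon(vmxon_region_paddr) != VMX_SUCCEED) {
+ *		// handle VMX_FAIL_INVALID (CF=1) or VMX_FAIL_VALID (ZF=1)
+ *	}
+ */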
+
+/*
+ * __vmxoff -- Leave VMX Operation
+ * int __vmxoff(void);
+ */
+Entry(__vmxoff)
+ FRAME
+
+ mov $(VMX_FAIL_INVALID), %ecx /* preload the three possible results */
+ mov $(VMX_FAIL_VALID), %edx
+ mov $(VMX_SUCCEED), %eax
+ vmxoff /* leave VMX root operation */
+ cmovcl %ecx, %eax /* CF = 1, ZF = 0 */
+ cmovzl %edx, %eax /* CF = 0, ZF = 1 */
+
+ EMARF
+ ret
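+
+/*
+ * Usage sketch (illustrative only, not part of this diff): __vmxoff is
+ * assumed to be paired with an earlier successful __vmxon on the same CPU:
+ *
+ *	int result = __vmxoff();
+ *	// result is VMX_SUCCEED, VMX_FAIL_INVALID, or VMX_FAIL_VALID
+ */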
+
+#endif /* CONFIG_VMX */
+
+/*
+ * mfence -- Memory Barrier
+ * Use out-of-line assembly to get
+ * standard x86-64 ABI guarantees
+ * about what the caller's codegen
+ * has in registers vs. memory
+ */
+Entry(do_mfence)
+ mfence
+ ret
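+
+/*
+ * C-side sketch (assumed wiring, not part of this diff): declaring do_mfence
+ * as an external function and calling it makes the compiler follow the
+ * x86-64 ABI at the call site -- values the callee could observe cannot be
+ * kept only in caller registers, so pending stores reach memory before the
+ * fence executes:
+ *
+ *	extern void do_mfence(void);
+ *
+ *	shared_flag = 1;	// 'shared_flag' is a hypothetical global
+ *	do_mfence();		// earlier memory ops complete before later ones
+ */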