+/*
+ * Copyin 32 bit aligned word as a single transaction
+ * rdi: source address (user)
+ * rsi: destination address (kernel)
+ */
+Entry(_copyin_atomic32)
+ pushq %rbp /* Save registers */
+ movq %rsp, %rbp
+ RECOVERY_SECTION
+ RECOVER(L_copyin_atomic32_fail) /* Set up recovery handler for next instruction */
+ movl (%rdi), %eax /* Load long from user */
+ movl %eax, (%rsi) /* Store to kernel */
+ xorl %eax, %eax /* Return success */
+ popq %rbp /* Restore registers */
+ retq /* Return */
+
+L_copyin_atomic32_fail:
+ movl $(EFAULT), %eax /* Return error for failure */
+ popq %rbp /* Restore registers */
+ retq /* Return */
+
+/*
+ * Copyin 64 bit aligned word as a single transaction
+ * rdi: source address (user)
+ * rsi: destination address (kernel)
+ */
+Entry(_copyin_atomic64)
+ pushq %rbp /* Save registers */
+ movq %rsp, %rbp
+ RECOVERY_SECTION
+ RECOVER(L_copyin_atomic64_fail) /* Set up recovery handler for next instruction */
+ movq (%rdi), %rax /* Load quad from user */
+ movq %rax, (%rsi) /* Store to kernel */
+ xorl %eax, %eax /* Return success */
+ popq %rbp /* Restore registers */
+ retq /* Return */
+
+L_copyin_atomic64_fail:
+ movl $(EFAULT), %eax /* Return error for failure */
+ popq %rbp /* Restore registers */
+ retq /* Return */
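+
+/*
+ * C-level sketch of the copyin entries above (an assumption drawn from
+ * the register usage, not from a header in this change): under the
+ * System V AMD64 ABI %rdi and %rsi carry the first two arguments and
+ * %eax the return value, so these behave roughly like
+ *
+ *	int _copyin_atomic32(const void *user_src, uint32_t *kernel_dst);
+ *	int _copyin_atomic64(const void *user_src, uint64_t *kernel_dst);
+ *
+ * returning 0 on success or EFAULT if the user access faults. The
+ * RECOVERY_SECTION/RECOVER pair registers the fail label as the recovery
+ * point for the immediately following user-memory instruction.
+ */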
+
+/*
+ * Copyout 32 bit aligned word as a single transaction
+ * rdi: source address (kernel)
+ * rsi: destination address (user)
+ */
+Entry(_copyout_atomic32)
+ pushq %rbp /* Save registers */
+ movq %rsp, %rbp
+ movl (%rdi), %eax /* Load long from kernel */
+ RECOVERY_SECTION
+ RECOVER(L_copyout_atomic32_fail) /* Set up recovery handler for next instruction */
+ movl %eax, (%rsi) /* Store long to user */
+ xorl %eax, %eax /* Return success */
+ popq %rbp /* Restore registers */
+ retq /* Return */
+
+L_copyout_atomic32_fail:
+ movl $(EFAULT), %eax /* Return error for failure */
+ popq %rbp /* Restore registers */
+ retq /* Return */
+
+/*
+ * Copyout 64 bit aligned word as a single transaction
+ * rdi: source address (kernel)
+ * rsi: destination address (user)
+ */
+Entry(_copyout_atomic64)
+ pushq %rbp /* Save registers */
+ movq %rsp, %rbp
+ movq (%rdi), %rax /* Load quad from kernel */
+ RECOVERY_SECTION
+ RECOVER(L_copyout_atomic64_fail) /* Set up recovery handler for next instruction */
+ movq %rax, (%rsi) /* Store quad to user */
+ xorl %eax, %eax /* Return success */
+ popq %rbp /* Restore registers */
+ retq /* Return */
+
+L_copyout_atomic64_fail:
+ movl $(EFAULT), %eax /* Return error for failure */
+ popq %rbp /* Restore registers */
+ retq /* Return */
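+
+/*
+ * Corresponding C-level sketch for the copyout direction (again an
+ * assumption based on the register usage above, not a declared API):
+ *
+ *	int _copyout_atomic32(const uint32_t *kernel_src, void *user_dst);
+ *	int _copyout_atomic64(const uint64_t *kernel_src, void *user_dst);
+ *
+ * Note that the load from kernel memory is performed before
+ * RECOVERY_SECTION, so only the single store to user memory is covered
+ * by the recovery handler that redirects a fault to the fail label.
+ */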
+
+