/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <arm64/proc_reg.h>
#include <pexpert/arm64/board_config.h>
#include "assym.s"

#ifndef __ASSEMBLER__
#error "This header should only be used in .s files"
#endif

/**
 * Loads the following values from the thread_kernel_state pointer in x0:
 *
 * x1:  $x0->ss_64.pc
 * w2:  $x0->ss_64.cpsr
 * x16: $x0->ss_64.x16
 * x17: $x0->ss_64.x17
 * lr:  $x0->ss_64.lr
 *
 * On CPUs with PAC support, this macro will auth the above values with
 * \check_func (ml_check_signed_state by default).
 *
 * tmp1 - scratch register 1
 * tmp2 - scratch register 2
 * tmp3 - scratch register 3
 * tmp4 - scratch register 4
 * tmp5 - scratch register 5
 *
 * An illustrative usage sketch follows the macro definitions below.
 */
/* BEGIN IGNORE CODESTYLE */
.macro AUTH_THREAD_STATE_IN_X0_COMMON tmp1, tmp2, tmp3, tmp4, tmp5, el0_state_allowed=0, PC_OFF=SS64_PC, CPSR_OFF=SS64_CPSR, X16_OFF=SS64_X16, LR_OFF=SS64_LR, check_func=ml_check_signed_state
        ldr     w2, [x0, \CPSR_OFF]
.if \el0_state_allowed==0
#if __has_feature(ptrauth_calls)
        // If testing for a canary CPSR value, ensure that we do not observe writes to other fields without it
        dmb     ld
#endif
.endif
        ldr     x1, [x0, \PC_OFF]
        ldp     x16, x17, [x0, \X16_OFF]

#if defined(HAS_APPLE_PAC)
        // Save x3-x5 to preserve across call
        mov     \tmp3, x3
        mov     \tmp4, x4
        mov     \tmp5, x5

        /*
         * Arg0: The ARM context pointer (already in x0)
         * Arg1: PC to check (loaded above)
         * Arg2: CPSR to check (loaded above)
         * Arg3: the LR to check
         *
         * Stash saved state PC and CPSR in other registers to avoid reloading potentially unauthed
         * values from memory. (ml_check_signed_state will clobber x1 and x2.)
         */
        mov     \tmp1, x1
        mov     \tmp2, x2
        ldr     x3, [x0, \LR_OFF]
        mov     x4, x16
        mov     x5, x17
        bl      EXT(\check_func)
        mov     x1, \tmp1
        mov     x2, \tmp2

.if \el0_state_allowed==0
        and     \tmp2, \tmp2, #PSR64_MODE_MASK
        cbnz    \tmp2, 1f
        bl      EXT(ml_auth_thread_state_invalid_cpsr)
1:
.endif

        // LR was already loaded/authed earlier; if we reload it we might be loading a potentially unauthed value
        mov     lr, x3
        mov     x3, \tmp3
        mov     x4, \tmp4
        mov     x5, \tmp5
#else
        ldr     lr, [x0, \LR_OFF]
#endif /* defined(HAS_APPLE_PAC) */
.endmacro
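/*
 * Convenience wrappers: AUTH_THREAD_STATE_IN_X0 authenticates a regular
 * saved state using the SS64_* offsets, while AUTH_KERNEL_THREAD_STATE_IN_X0
 * authenticates a kernel saved state using the SS64_KERNEL_* offsets and
 * ml_check_kernel_signed_state.
 */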
.macro AUTH_THREAD_STATE_IN_X0 tmp1, tmp2, tmp3, tmp4, tmp5, el0_state_allowed=0
        AUTH_THREAD_STATE_IN_X0_COMMON \tmp1, \tmp2, \tmp3, \tmp4, \tmp5, \el0_state_allowed
.endmacro

.macro AUTH_KERNEL_THREAD_STATE_IN_X0 tmp1, tmp2, tmp3, tmp4, tmp5, el0_state_allowed=0
        AUTH_THREAD_STATE_IN_X0_COMMON \tmp1, \tmp2, \tmp3, \tmp4, \tmp5, \el0_state_allowed, SS64_KERNEL_PC, SS64_KERNEL_CPSR, SS64_KERNEL_X16, SS64_KERNEL_LR, ml_check_kernel_signed_state
.endmacro
/* END IGNORE CODESTYLE */
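
/*
 * Illustrative usage (a minimal sketch, not taken from this header): a
 * hypothetical exception-return path that keeps the saved thread state at
 * the top of its stack and has x20-x24 free as scratch registers might
 * authenticate and begin restoring that state roughly as follows.
 *
 *     mov     x0, sp                      // x0 = saved thread state pointer
 *     AUTH_THREAD_STATE_IN_X0 x20, x21, x22, x23, x24, el0_state_allowed=1
 *     msr     ELR_EL1, x1                 // authenticated PC -> return address
 *     msr     SPSR_EL1, x2                // authenticated CPSR -> return PSTATE
 *
 * The AUTH_KERNEL_THREAD_STATE_IN_X0 variant is invoked the same way.
 */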

/* vim: set ft=asm: */