xref: /xnu-12377.81.4/osfmk/arm64/machine_routines_asm.h (revision 043036a2b3718f7f0be807e2870f8f47d3fa0796)
1 /*
2  * Copyright (c) 2019 Apple Inc. All rights reserved.
3  *
4  * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5  *
6  * This file contains Original Code and/or Modifications of Original Code
7  * as defined in and that are subject to the Apple Public Source License
8  * Version 2.0 (the 'License'). You may not use this file except in
9  * compliance with the License. The rights granted to you under the License
10  * may not be used to create, or enable the creation or redistribution of,
11  * unlawful or unlicensed copies of an Apple operating system, or to
12  * circumvent, violate, or enable the circumvention or violation of, any
13  * terms of an Apple operating system software license agreement.
14  *
15  * Please obtain a copy of the License at
16  * http://www.opensource.apple.com/apsl/ and read it before using this file.
17  *
18  * The Original Code and all software distributed under the License are
19  * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20  * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21  * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22  * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23  * Please see the License for the specific language governing rights and
24  * limitations under the License.
25  *
26  * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27  */
28 
29 #include <arm64/proc_reg.h>
30 #include <pexpert/arm64/board_config.h>
31 #include "assym.s"
32 
33 #ifndef __ASSEMBLER__
34 #error "This header should only be used in .s files"
35 #endif
36 
37 /**
38  * Loads the following values from the thread_kernel_state pointer in x0:
39  *
40  * x1: $x0->ss_64.pc
41  * w2: $x0->ss_64.cpsr
42  * x16: $x0->ss_64.x16
43  * x17: $x0->ss_64.x17
44  * lr: $x0->ss_64.lr
45  *
46  * On CPUs with PAC support, this macro will auth the above values with ml_check_signed_state().
47  *
48  * tmp1 - scratch register 1
49  * tmp2 - scratch register 2
50  * tmp3 - scratch register 3
51  * tmp4 - scratch register 4
52  * tmp5 - scratch register 5
53  * tmp6 - scratch register 6
 * el0_state_allowed - if 0 (the default), a saved CPSR whose mode bits
 *     (PSR64_MODE_MASK) are all clear (i.e. EL0 state) is treated as
 *     invalid and diverted to ml_auth_thread_state_invalid_cpsr()
 *
 * Clobbers: x1, x2, x16, x17, lr (the loaded/authed outputs) and all six
 * tmp registers.  On HAS_APPLE_PAC builds, x3-x6 are saved into
 * tmp3-tmp6 and restored before the macro falls through.
54  */
55 /* BEGIN IGNORE CODESTYLE */
56 .macro AUTH_THREAD_STATE_IN_X0 tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, el0_state_allowed=0
57 #if __has_feature(ptrauth_calls)
	// Switch to SP_EL1 while operating on potentially-unauthed state
58 	msr		SPSel, #1
59 #endif
60 	ldr		w2, [x0, SS64_CPSR]
61 .if \el0_state_allowed==0
62 #if __has_feature(ptrauth_calls)
63 	// If testing for a canary CPSR value, ensure that we do not observe writes to other fields without it
64 	dmb		ld
65 #endif
66 .endif
67 	ldr		x1, [x0, SS64_PC]
68 	ldp		x16, x17, [x0, SS64_X16]
69 
70 #if defined(HAS_APPLE_PAC)
71 	// Save x3-x6 to preserve across call
72 	mov		\tmp3, x3
73 	mov		\tmp4, x4
74 	mov		\tmp5, x5
75 	mov		\tmp6, x6
76 
77 	/*
78 	* Arg0: The ARM context pointer (already in x0)
79 	* Arg1: PC to check (loaded above)
80 	* Arg2: CPSR to check (loaded above)
81 	* Arg3: the LR to check
82 	* Arg4: the X16 to check
83 	* Arg5: the X17 to check
84 	*
85 	* Stash saved state PC and CPSR in other registers to avoid reloading potentially unauthed
86 	* values from memory.  (ml_check_signed_state will clobber x1, x2, x16 and x17.)
87 	*/
88 	mov		\tmp1, x1
89 	mov		\tmp2, x2
90 	ldr		x3, [x0, SS64_LR]
91 	mov		x4, x16
92 	mov		x5, x17
93 	bl		EXT(ml_check_signed_state)
94 	mov		x1, \tmp1
95 	mov		x2, \tmp2
	// x4/x5 still hold the authed x16/x17: ml_check_signed_state clobbers
	// only x1, x2, x16, x17 (per the comment above), so x3-x5 survive the call
96 	mov		x16, x4
97 	mov		x17, x5
	// Back to SP_EL0 (selected SP_EL1 above under ptrauth_calls)
98 	msr		SPSel, #0
99 
100 .if \el0_state_allowed==0
	// Mode bits all clear => EL0 CPSR, which is not acceptable here
101 	and		\tmp2, \tmp2, #PSR64_MODE_MASK
102 	cbnz		\tmp2, 1f
103 	bl		EXT(ml_auth_thread_state_invalid_cpsr)
104 1:
105 .endif
106 
107 	// LR was already loaded/authed earlier, if we reload it we might be loading a potentially unauthed value
108 	mov		lr, x3
109 	mov		x3, \tmp3
110 	mov		x4, \tmp4
111 	mov		x5, \tmp5
112 	mov		x6, \tmp6
113 #else
	// No PAC: LR can be loaded directly, nothing to authenticate
114 	ldr		lr, [x0, SS64_LR]
115 #endif /* defined(HAS_APPLE_PAC) */
116 .endmacro
117 
118 #if !__ARM_ARCH_8_6__
// brk immediate used to report a software-detected AUTDA (data key A) auth
// failure.  Pre-ARMv8.6 cores lack FEAT_FPAC, so a failed auth does not fault
// by itself and must be detected and trapped manually (see AUTDA_DIVERSIFIED).
// NOTE(review): 0xc472 presumably matches the compiler's ptrauth failure-trap
// encoding for the DA key -- confirm against the toolchain's convention.
119 .set BRK_AUTDA_FAILURE, 0xc472
120 #endif
121 
122 /**
123  * Performs the appropriate SoC specific routine for a blended AUTDA operation.
124  * On success, falls through with stripped result in \value. Faults otherwise.
125  *
126  * value (inout): The register holding the PAC'd pointer to authenticate.
127  * Stripped result will be returned in this register.
128  * address (input, clobbered): The register holding the address from which
129  * \value was loaded. This forms a part of the diversification.
130  * diversifier (input): The diversifier constant to blend with \address.
 *
 * On builds without ptrauth_calls this macro expands to nothing and
 * \value passes through unmodified.
131  */
132 .macro AUTDA_DIVERSIFIED value, address, diversifier
133 #if __has_feature(ptrauth_calls)
134 	/* Blend the 16-bit diversifier into the top bits of the load address */
135 	movk		\address, \diversifier, lsl #48
136 	autda		\value, \address
137 #if !__ARM_ARCH_8_6__
	// Without FEAT_FPAC (pre-ARMv8.6), a failed autda corrupts the PAC bits
	// instead of faulting.  Detect failure by comparing the auth result
	// against its stripped form and trap explicitly on mismatch.
138 	mov		\address, \value
139 	xpacd	\address
140 	cmp		\address, \value
141 	b.eq	Lautda_ok_\@
142 	brk		#BRK_AUTDA_FAILURE
143 Lautda_ok_\@:
144 #endif /* !__ARM_ARCH_8_6__ */
145 #endif /* __has_feature(ptrauth_calls) */
146 .endmacro
147 
148 /**
149  * Loads and auths the top of a thread's kernel stack pointer.
150  *
151  * Faults on auth failure.  src and dst can be the same register, as long as the
152  * caller doesn't mind clobbering the input.
153  *
154  * src (input): struct thread *
155  * dst (output): ptrauth_auth(src->machine.kstackptr)
156  * tmp: clobbered
157  */
158 .macro LOAD_KERN_STACK_TOP	dst, src, tmp
	// \tmp = &thread->machine.kstackptr; it also serves as the
	// address-diversifier input for the blended auth below
159 	add		\tmp, \src, TH_KSTACKPTR
160 	ldr		\dst, [\tmp]
161 	AUTDA_DIVERSIFIED \dst, address=\tmp, diversifier=TH_KSTACKPTR_DIVERSIFIER
162 .endmacro
163 
164 /**
165  * Loads and auths a thread's user context data.
166  *
167  * Faults on auth failure.  src and dst can be the same register, as long as the
168  * caller doesn't mind clobbering the input.
169  *
170  * src (input): struct thread *
171  * dst (output): ptrauth_auth(src->machine.upcb)
172  * tmp: clobbered
173  */
174 .macro LOAD_USER_PCB	dst, src, tmp
	// \tmp = &thread->machine.upcb; it also serves as the
	// address-diversifier input for the blended auth below
175 	add		\tmp, \src, TH_UPCB
176 	ldr		\dst, [\tmp]
177 	AUTDA_DIVERSIFIED \dst, address=\tmp, diversifier=TH_UPCB_DIVERSIFIER
178 .endmacro
179 
180 /**
181  * Loads and auths a thread's interrupt stack pointer.
182  *
183  * Faults on auth failure.  src and dst can be the same register, as long as the
184  * caller doesn't mind clobbering the input.
185  *
186  * src (input): struct thread *
187  * dst (output): ptrauth_auth(src->cpuDataP.istackptr)
188  * tmp: clobbered
189  */
190 .macro LOAD_INT_STACK_THREAD	dst, src, tmp
	// \tmp = thread->cpuDataP; the nested macro then reuses \tmp as its
	// own scratch register, which is explicitly allowed by its contract
191 	ldr		\tmp, [\src, #ACT_CPUDATAP]
192 	LOAD_INT_STACK_CPU_DATA \dst, src=\tmp, tmp=\tmp
193 .endmacro
194 
195 /**
196  * Loads and auths a CPU's interrupt stack pointer.
197  *
198  * Faults on auth failure.
199  *
200  * src (input): cpu_data_t *
201  * dst (output): ptrauth_auth(cpuDataP.istackptr)
202  * tmp (clobber): Temporary register. Can be the same as \src if callers don't
203  * care to preserve it.
204  */
205 .macro LOAD_INT_STACK_CPU_DATA	dst, src, tmp
	// \tmp = &cpu_data->istackptr; it also serves as the
	// address-diversifier input for the blended auth below
206 	add		\tmp, \src, #CPU_ISTACKPTR
207 	ldr		\dst, [\tmp]
208 	AUTDA_DIVERSIFIED \dst, address=\tmp, diversifier=CPU_ISTACKPTR_DIVERSIFIER
209 .endmacro
210 
211 /**
212  * Loads and auths a thread's exception stack pointer.
213  *
214  * Faults on auth failure.  src and dst can be the same register, as long as
215  * the caller doesn't mind clobbering the input.
216  *
217  * src (input): struct thread *
218  * dst (output): ptrauth_auth(src->cpuDataP.excepstackptr)
219  * tmp: clobbered
220  */
221 .macro LOAD_EXCEP_STACK_THREAD	dst, src, tmp
	// \tmp = thread->cpuDataP; the nested macro then reuses \tmp as its
	// own scratch register, which is explicitly allowed by its contract
222 	ldr		\tmp, [\src, #ACT_CPUDATAP]
223 	LOAD_EXCEP_STACK_CPU_DATA \dst, src=\tmp, tmp=\tmp
224 .endmacro
225 
226 /**
227  * Loads and auths a CPU's exception stack pointer.
228  *
229  * Faults on auth failure.
230  *
231  * src (input): cpu_data_t *
232  * dst (output): ptrauth_auth(cpuDataP.excepstackptr)
233  * tmp (clobber): Temporary register. Can be the same as \src if callers don't
234  * care to preserve it.
235  */
236 .macro LOAD_EXCEP_STACK_CPU_DATA	dst, src, tmp
	// \tmp = &cpu_data->excepstackptr; it also serves as the
	// address-diversifier input for the blended auth below
237 	add		\tmp, \src, #CPU_EXCEPSTACKPTR
238 	ldr		\dst, [\tmp]
239 	AUTDA_DIVERSIFIED \dst, address=\tmp, diversifier=CPU_EXCEPSTACKPTR_DIVERSIFIER
240 .endmacro
241 /* END IGNORE CODESTYLE */
242 /* vim: set ft=asm: */
243