/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <arm64/proc_reg.h>
#include <pexpert/arm64/board_config.h>
#include "assym.s"

#ifndef __ASSEMBLER__
#error "This header should only be used in .s files"
#endif

/**
 * Loads the following values from the thread_kernel_state pointer in x0:
 *
 * x1: $x0->ss_64.pc
 * w2: $x0->ss_64.cpsr
 * x16: $x0->ss_64.x16
 * x17: $x0->ss_64.x17
 * lr: $x0->ss_64.lr
 *
 * On CPUs with PAC support, this macro will auth the above values with ml_check_signed_state().
 *
 * tmp1 - scratch register 1
 * tmp2 - scratch register 2
 * tmp3 - scratch register 3
 * tmp4 - scratch register 4
 * tmp5 - scratch register 5
 * tmp6 - scratch register 6
 */
/* BEGIN IGNORE CODESTYLE */
.macro AUTH_THREAD_STATE_IN_X0 tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, el0_state_allowed=0
#if __has_feature(ptrauth_calls)
	msr		SPSel, #1
#endif
	ldr		w2, [x0, SS64_CPSR]
.if \el0_state_allowed==0
#if __has_feature(ptrauth_calls)
	// If testing for a canary CPSR value, ensure that we do not observe writes
	// to other fields without also observing the CPSR write
	dmb		ld
#endif
.endif
	ldr		x1, [x0, SS64_PC]
	ldp		x16, x17, [x0, SS64_X16]

#if defined(HAS_APPLE_PAC)
	// Save x3-x6 to preserve them across the call
	mov		\tmp3, x3
	mov		\tmp4, x4
	mov		\tmp5, x5
	mov		\tmp6, x6

	/*
	 * Arg0: the ARM context pointer (already in x0)
	 * Arg1: PC to check (loaded above)
	 * Arg2: CPSR to check (loaded above)
	 * Arg3: the LR to check
	 *
	 * Stash the saved-state PC and CPSR in other registers to avoid reloading
	 * potentially unauthed values from memory.  (ml_check_signed_state will
	 * clobber x1 and x2.)
	 */
	mov		\tmp1, x1
	mov		\tmp2, x2
	ldr		x3, [x0, SS64_LR]
	mov		x4, x16
	mov		x5, x17
	bl		EXT(ml_check_signed_state)
	mov		x1, \tmp1
	mov		x2, \tmp2
	msr		SPSel, #0

.if \el0_state_allowed==0
	and		\tmp2, \tmp2, #PSR64_MODE_MASK
	cbnz		\tmp2, 1f
	bl		EXT(ml_auth_thread_state_invalid_cpsr)
1:
.endif

	// LR was already loaded and authed earlier; reloading it here could pick up
	// a potentially unauthed value from memory
	mov		lr, x3
	mov		x3, \tmp3
	mov		x4, \tmp4
	mov		x5, \tmp5
	mov		x6, \tmp6
#else
	ldr		lr, [x0, SS64_LR]
#endif /* defined(HAS_APPLE_PAC) */
.endmacro
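
/*
 * Illustrative call site (a sketch, not lifted from an actual xnu caller):
 * an exception-return path with the saved-state pointer in x0 might donate
 * six callee-saved registers as scratch, e.g.:
 *
 *	AUTH_THREAD_STATE_IN_X0	x19, x20, x21, x22, x23, x24
 *
 * Note that on PAC-enabled builds the macro executes a `bl` and then loads a
 * fresh lr from the saved state, so the caller's lr is clobbered by design.
 */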

#if !__ARM_ARCH_8_6__
// brk immediate used below to report a failed AUTDA; only needed on
// pre-ARMv8.6 targets, where a failed auth does not fault on its own
.set BRK_AUTDA_FAILURE, 0xc472
#endif

/**
 * Loads and auths a pointer to the top of a thread's kernel stack.
 *
 * Faults on auth failure.  src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->machine.kstackptr)
 * tmp: clobbered
 */
.macro LOAD_KERN_STACK_TOP	dst, src, tmp
	add		\tmp, \src, TH_KSTACKPTR
	ldr		\dst, [\tmp]
#if __has_feature(ptrauth_calls)
	movk		\tmp, TH_KSTACKPTR_DIVERSIFIER, lsl #48
	autda		\dst, \tmp
#if !__ARM_ARCH_8_6__
	// Pre-ARMv8.6 CPUs don't fault on a failed autda; detect failure by
	// checking whether stripping the PAC changes the authed value
	mov		\tmp, \dst
	xpacd		\tmp
	cmp		\tmp, \dst
	b.eq		Lkstackptr_ok_\@
	brk		BRK_AUTDA_FAILURE
Lkstackptr_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro
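
/*
 * Example use (an illustrative sketch; register choices are hypothetical):
 * switching onto a thread's kernel stack, with the struct thread * in x1:
 *
 *	LOAD_KERN_STACK_TOP	x2, x1, x3
 *	mov	sp, x2
 */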

/**
 * Loads and auths a pointer to a thread's user context data.
 *
 * Faults on auth failure.  src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->machine.upcb)
 * tmp: clobbered
 */
.macro LOAD_USER_PCB	dst, src, tmp
	add		\tmp, \src, TH_UPCB
	ldr		\dst, [\tmp]
#if __has_feature(ptrauth_calls)
	movk		\tmp, TH_UPCB_DIVERSIFIER, lsl #48
	autda		\dst, \tmp
#if !__ARM_ARCH_8_6__
	mov		\tmp, \dst
	xpacd		\tmp
	cmp		\tmp, \dst
	b.eq		Lupcb_ok_\@
	brk		BRK_AUTDA_FAILURE
Lupcb_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro
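
/*
 * Example use (an illustrative sketch; register choices are hypothetical):
 * fetching the saved user state of the thread in x0; x9 is scratch, and x0
 * survives because dst != src here:
 *
 *	LOAD_USER_PCB	x1, x0, x9
 *	ldr	x2, [x1, SS64_PC]	// x1 is now the authed upcb pointer
 */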

/**
 * Loads and auths a thread's interrupt stack pointer.
 *
 * Faults on auth failure.  src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->cpuDataP->istackptr)
 * tmp: clobbered
 */
.macro LOAD_INT_STACK	dst, src, tmp
	ldr		\tmp, [\src, ACT_CPUDATAP]
	add		\tmp, \tmp, CPU_ISTACKPTR
	ldr		\dst, [\tmp]
#if __has_feature(ptrauth_calls)
	movk		\tmp, CPU_ISTACKPTR_DIVERSIFIER, lsl #48
	autda		\dst, \tmp
#if !__ARM_ARCH_8_6__
	mov		\tmp, \dst
	xpacd		\tmp
	cmp		\tmp, \dst
	b.eq		Listackptr_ok_\@
	brk		BRK_AUTDA_FAILURE
Listackptr_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro
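
/*
 * Example use (an illustrative sketch; register choices are hypothetical):
 * an interrupt-entry path could switch to the per-CPU interrupt stack:
 *
 *	LOAD_INT_STACK	x2, x1, x3	// x1 = current thread
 *	mov	sp, x2
 */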
/* END IGNORE CODESTYLE */

/* vim: set ft=asm: */