xref: /xnu-12377.1.9/osfmk/arm64/exception_asm.h (revision f6217f891ac0bb64f3d375211650a4c1ff8ca1ea)
1 /*
2  * Copyright (c) 2018 Apple Inc. All rights reserved.
3  *
4  * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5  *
6  * This file contains Original Code and/or Modifications of Original Code
7  * as defined in and that are subject to the Apple Public Source License
8  * Version 2.0 (the 'License'). You may not use this file except in
9  * compliance with the License. The rights granted to you under the License
10  * may not be used to create, or enable the creation or redistribution of,
11  * unlawful or unlicensed copies of an Apple operating system, or to
12  * circumvent, violate, or enable the circumvention or violation of, any
13  * terms of an Apple operating system software license agreement.
14  *
15  * Please obtain a copy of the License at
16  * http://www.opensource.apple.com/apsl/ and read it before using this file.
17  *
18  * The Original Code and all software distributed under the License are
19  * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20  * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21  * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22  * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23  * Please see the License for the specific language governing rights and
24  * limitations under the License.
25  *
26  * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27  */
28 
29 #include <arm64/pac_asm.h>
30 #include <pexpert/arm64/board_config.h>
31 #include "assym.s"
32 
33 
34 #if XNU_MONITOR
35 /*
36  * Exit path defines; for controlling PPL -> kernel transitions.
37  * These should fit within a 32-bit integer, as the PPL trampoline packs them into a 32-bit field.
38  */
39 #define PPL_EXIT_DISPATCH   0 /* This is a clean exit after a PPL request. */
40 #define PPL_EXIT_PANIC_CALL 1 /* The PPL has called panic. */
41 #define PPL_EXIT_BAD_CALL   2 /* The PPL request failed. */
42 #define PPL_EXIT_EXCEPTION  3 /* The PPL took an exception. */
43 
/*
 * Aliases for the banked exception/context system registers used on
 * kernel <-> PPL (guarded mode) transitions.  KERNEL_MODE_* name the
 * *_GL11 copies and GUARDED_MODE_* the *_EL1 copies.
 * NOTE(review): presumably each set is the one that is live while
 * executing in the corresponding mode -- confirm against the PPL
 * trampoline code that consumes these aliases.
 */
44 #define KERNEL_MODE_ELR      ELR_GL11
45 #define KERNEL_MODE_FAR      FAR_GL11
46 #define KERNEL_MODE_ESR      ESR_GL11
47 #define KERNEL_MODE_SPSR     SPSR_GL11
48 #define KERNEL_MODE_VBAR     VBAR_GL11
49 #define KERNEL_MODE_TPIDR    TPIDR_GL11
50 
51 #define GUARDED_MODE_ELR     ELR_EL1
52 #define GUARDED_MODE_FAR     FAR_EL1
53 #define GUARDED_MODE_ESR     ESR_EL1
54 #define GUARDED_MODE_SPSR    SPSR_EL1
55 #define GUARDED_MODE_VBAR    VBAR_EL1
56 #define GUARDED_MODE_TPIDR   TPIDR_EL1
57 
58 /*
59  * LOAD_PMAP_CPU_DATA
60  *
61  * Loads the PPL per-CPU data array entry for the current CPU.
62  *   arg0 - Address of the PPL per-CPU data is returned through this
63  *   arg1 - Scratch register
64  *   arg2 - Scratch register
65  *
66  */
67 .macro LOAD_PMAP_CPU_DATA
68 	/* Get the CPU ID. */
69 	mrs		$0, MPIDR_EL1                   // $0 = MPIDR (CPU affinity/topology register)
70 	ubfx	$1, $0, MPIDR_AFF1_SHIFT, MPIDR_AFF1_WIDTH // $1 = Aff1 field (cluster number)
71 	adrp	$2, EXT(cluster_offsets)@page
72 	add		$2, $2, EXT(cluster_offsets)@pageoff
73 	ldr		$1, [$2, $1, lsl #3]            // $1 = cluster_offsets[cluster]; 8-byte entries, hence lsl #3
74 
75 	and		$0, $0, MPIDR_AFF0_MASK         // $0 = Aff0 field (core number within the cluster)
76 	add		$0, $0, $1                      // flat CPU ID = per-cluster base offset + core number
77 
78 	/* Get the PPL CPU data array. */
79 	adrp	$1, EXT(pmap_cpu_data_array)@page
80 	add		$1, $1, EXT(pmap_cpu_data_array)@pageoff
81 
82 	/*
83 	 * Sanity check the CPU ID (this is not a panic because this pertains to
84 	 * the hardware configuration; this should only fail if our
85 	 * understanding of the hardware is incorrect).
86 	 */
87 	cmp		$0, MAX_CPUS
88 	b.hs	.                               // out of range: branch-to-self, i.e. hang here forever
89 
90 	mov		$2, PMAP_CPU_DATA_ARRAY_ENTRY_SIZE
91 	/* Get the PPL per-CPU data. */
92 	madd	$0, $0, $2, $1                  // $0 = array base + cpu_id * entry_size
93 .endmacro
94 
95 /*
96  * GET_PMAP_CPU_DATA
97  *
98  * Retrieves the PPL per-CPU data for the current CPU.
99  *   arg0 - Address of the PPL per-CPU data is returned through this
100  *   arg1 - Scratch register
101  *   arg2 - Scratch register
102  *
103  */
104 .macro GET_PMAP_CPU_DATA
	/* Thin wrapper: all of the work is done by LOAD_PMAP_CPU_DATA above. */
105 	LOAD_PMAP_CPU_DATA $0, $1, $2
106 .endmacro
107 
108 #endif /* XNU_MONITOR */
109 
110 /*
111  * INIT_SAVED_STATE_FLAVORS
112  *
113  * Initializes the saved state flavors of a new saved state structure
114  *  arg0 - saved state pointer
115  *  arg1 - 32-bit scratch reg
116  *  arg2 - 32-bit scratch reg
117  */
118 .macro INIT_SAVED_STATE_FLAVORS
119 	mov		$1, ARM_SAVED_STATE64                                   // Set saved state to 64-bit flavor
120 	mov		$2, ARM_SAVED_STATE64_COUNT
	/*
	 * NOTE(review): the stp of two 32-bit regs writes flavor and count in
	 * one shot, which assumes the count field immediately follows
	 * SS_FLAVOR -- confirm the layout in assym.s.
	 */
121 	stp		$1, $2, [$0, SS_FLAVOR]
122 	mov		$1, ARM_NEON_SAVED_STATE64                              // Set neon state to 64-bit flavor
123 	str		$1, [$0, NS_FLAVOR]
124 	mov		$1, ARM_NEON_SAVED_STATE64_COUNT
125 	str		$1, [$0, NS_COUNT]
126 .endmacro
127 
128 /*
129  * SPILL_REGISTERS
130  *
131  * Spills the current set of registers (excluding x0, x1, sp as well as x2, x3
132  * in KERNEL_MODE) to the specified save area.
133  *
134  * On CPUs with PAC, the kernel "A" keys are used to create a thread signature.
135  * These keys are deliberately kept loaded into the CPU for later kernel use.
136  *
137  *   arg0 - KERNEL_MODE or HIBERNATE_MODE
138  *   arg1 - (optional) register holding SPILL_REGISTERS_OPTION_* flag bits
139  *   x0 - Address of the save area
140  *   x25 - Return the value of FPCR
141  */
142 #define KERNEL_MODE 0
143 #define HIBERNATE_MODE 1
144 
145 /** When set, the thread will be given an invalid thread signature */
146 #define SPILL_REGISTERS_OPTION_POISON_THREAD_SIGNATURE_SHIFT	(0)
147 #define SPILL_REGISTERS_OPTION_POISON_THREAD_SIGNATURE \
148 	(1 << SPILL_REGISTERS_OPTION_POISON_THREAD_SIGNATURE_SHIFT)
149 /** When set, ELR and FAR will not be spilled */
150 #define SPILL_REGISTERS_OPTION_DONT_SPILL_ELR_FAR_SHIFT			(1)
151 #define SPILL_REGISTERS_OPTION_DONT_SPILL_ELR_FAR \
152 	(1 << SPILL_REGISTERS_OPTION_DONT_SPILL_ELR_FAR_SHIFT)
153 
/*
 * Option sets handed to SPILL_REGISTERS by the FLEH (first-level exception
 * handler) dispatch paths.  Under CONFIG_SPTM, sync exceptions skip the
 * ELR/FAR spill (the SPTM presumably provides them by other means --
 * confirm in the SPTM dispatch code); fatal exceptions poison the thread
 * signature so the saved state can never be resumed.
 */
154 #define FLEH_DISPATCH64_OPTION_SYNC_EXCEPTION 0
155 #if CONFIG_SPTM
156 #undef FLEH_DISPATCH64_OPTION_SYNC_EXCEPTION
157 #define FLEH_DISPATCH64_OPTION_SYNC_EXCEPTION \
158 	(SPILL_REGISTERS_OPTION_DONT_SPILL_ELR_FAR)
159 #endif /* CONFIG_SPTM */
160 
161 #define FLEH_DISPATCH64_OPTION_FATAL_EXCEPTION \
162 	(SPILL_REGISTERS_OPTION_POISON_THREAD_SIGNATURE)
163 
164 #define FLEH_DISPATCH64_OPTION_FATAL_SYNC_EXCEPTION \
165 	(FLEH_DISPATCH64_OPTION_FATAL_EXCEPTION | \
166 	 FLEH_DISPATCH64_OPTION_SYNC_EXCEPTION)
167 
168 #define FLEH_DISPATCH64_OPTION_NONE 0
169 
170 .macro SPILL_REGISTERS	mode options_register=
171 	/* Spill remaining GPRs */
172 	.if \mode != KERNEL_MODE
173 	stp		x2, x3, [x0, SS64_X2]
174 	.endif
175 	stp		x4, x5, [x0, SS64_X4]
176 	stp		x6, x7, [x0, SS64_X6]
177 	stp		x8, x9, [x0, SS64_X8]
178 	stp		x10, x11, [x0, SS64_X10]
179 	stp		x12, x13, [x0, SS64_X12]
180 	stp		x14, x15, [x0, SS64_X14]
181 	stp		x16, x17, [x0, SS64_X16]
182 	stp		x18, x19, [x0, SS64_X18]
183 	stp		x20, x21, [x0, SS64_X20]
184 	stp		x22, x23, [x0, SS64_X22]
185 	stp		x24, x25, [x0, SS64_X24]
186 	stp		x26, x27, [x0, SS64_X26]
187 	stp		x28, fp,  [x0, SS64_X28]
188 	str		lr, [x0, SS64_LR]
189 
190 	/* Save arm_neon_saved_state64 */
191 	stp		q0, q1, [x0, NS64_Q0]
192 	stp		q2, q3, [x0, NS64_Q2]
193 	stp		q4, q5, [x0, NS64_Q4]
194 	stp		q6, q7, [x0, NS64_Q6]
195 	stp		q8, q9, [x0, NS64_Q8]
196 	stp		q10, q11, [x0, NS64_Q10]
197 	stp		q12, q13, [x0, NS64_Q12]
198 	stp		q14, q15, [x0, NS64_Q14]
199 	stp		q16, q17, [x0, NS64_Q16]
200 	stp		q18, q19, [x0, NS64_Q18]
201 	stp		q20, q21, [x0, NS64_Q20]
202 	stp		q22, q23, [x0, NS64_Q22]
203 	stp		q24, q25, [x0, NS64_Q24]
204 	stp		q26, q27, [x0, NS64_Q26]
205 	stp		q28, q29, [x0, NS64_Q28]
206 	stp		q30, q31, [x0, NS64_Q30]
207 	mrs		x24, FPSR
208 	str		w24, [x0, NS64_FPSR]
209 	mrs		x25, FPCR                       // x25 keeps FPCR for the caller (see header comment)
210 	str		w25, [x0, NS64_FPCR]
/* NOTE(review): no branch in the visible code targets this label; presumably kept for a config that skips the NEON save -- confirm. */
211 Lsave_neon_state_done_\@:
212 
213 	mrs		x22, ELR_EL1                                                     // Get exception link register
214 	mrs		x23, SPSR_EL1                                                   // Load CPSR into var reg x23
215 
216 #if defined(HAS_APPLE_PAC)
217 	.if \mode != HIBERNATE_MODE
218 
219 .ifnb \options_register
	/* Poison option set: skip signing entirely and invalidate the hash instead. */
220 	tbnz	\options_register, SPILL_REGISTERS_OPTION_POISON_THREAD_SIGNATURE_SHIFT, Lspill_registers_do_poison_\@
221 .endif /* options_register */
222 
223 	/* Save x1 and LR to preserve across call */
224 	mov		x21, x1
225 	mov		x20, lr
226 
227 	/*
228 	 * Create thread state signature
229 	 *
230 	 * Arg0: The ARM context pointer
231 	 * Arg1: The PC value to sign
232 	 * Arg2: The CPSR value to sign
233 	 * Arg3: The LR value to sign
234 	 * Arg4: The X16 value to sign
235 	 * Arg5: The X17 value to sign
236 	 */
237 	mov		x1, x22                         // PC = spilled ELR_EL1
238 	mov		w2, w23                         // CPSR = spilled SPSR_EL1
239 	mov		x3, x20                         // LR as saved above
240 	mov		x4, x16
241 	mov		x5, x17
242 
	/* Force SP1 for the call, then restore the caller's stack selection. */
243 	mrs		x19, SPSel
244 	msr		SPSel, #1
245 	bl		_ml_sign_thread_state
246 	/* ml_sign_thread_state has special ABI, overwrites x1, x2, x17 */
247 	mov		x17, x5                         // restore original x17 from its signed copy
248 	msr		SPSel, x19
249 	mov		lr, x20
250 	mov		x1, x21
251 .ifnb \options_register
252 	b		Lspill_registers_poison_continue_\@
253 
254 Lspill_registers_do_poison_\@:
	/* Write an all-ones (invalid) signature so this state can never validate. */
255 	mov		x21, #-1
256 	str		x21, [x0, SS64_JOPHASH]
257 
258 Lspill_registers_poison_continue_\@:
259 .endif /* options_register */
260 
261 	.endif
262 #endif /* defined(HAS_APPLE_PAC) */
263 
264 	mrs		x20, FAR_EL1
265 	mrs		x21, ESR_EL1
266 
267 .ifnb \options_register
	/* DONT_SPILL_ELR_FAR option: leave SS64_FAR/SS64_PC untouched. */
268 	tbnz	\options_register, SPILL_REGISTERS_OPTION_DONT_SPILL_ELR_FAR_SHIFT, Lspill_registers_skip_elr_far_\@
269 .endif /* options_register != NONE */
270 
271 	str		x20, [x0, SS64_FAR]
272 	str		x22, [x0, SS64_PC]              // PC from ELR_EL1 captured before signing
273 
274 .ifnb \options_register
275 Lspill_registers_skip_elr_far_\@:
276 .endif /* options_register != NONE */
	/* ESR/CPSR are always spilled, regardless of options. */
277 	str		x21, [x0, SS64_ESR]
278 	str		w23, [x0, SS64_CPSR]
279 .endmacro
280 
281 .macro DEADLOOP
	/* Branch-to-self: spin here forever. */
282 	b	.
283 .endmacro
284 
285 /**
286  * Reloads SP with the current thread's interrupt stack.
287  *
288  * SP0 is expected to already be selected.  Clobbers x1 and tmp.
289  */
290 .macro SWITCH_TO_INT_STACK	tmp
291 	mrs		x1, TPIDR_EL1                   // x1 = TPIDR_EL1 (per-thread pointer consumed by LOAD_INT_STACK_THREAD)
292 	LOAD_INT_STACK_THREAD	dst=x1, src=x1, tmp=\tmp // macro defined elsewhere; yields the thread's interrupt stack
293 	mov		sp, x1			// Set the stack pointer to the interrupt stack
294 .endmacro
295 
296 #if HAS_ARM_FEAT_SME
297 /*
298  * LOAD_OR_STORE_Z_P_REGISTERS - loads or stores the Z and P register files
299  *
300  * instr: ldr or str
301  * svl_b: register containing SVL_B
302  * ss: register pointing to save area of size 34 * SVL_B (clobbered)
303  *
304  * Save-area layout: 32 Z registers (SVL_B bytes each) followed by
305  * 16 P registers (SVL_B/8 bytes each) = 34 * SVL_B bytes total.
306  * The "mul vl" immediate is scaled by the vector length for Z accesses
307  * and by the predicate length (VL/8) for P accesses, so the same
308  * #0..#N immediates index both files.
309  */
304 .macro LOAD_OR_STORE_Z_P_REGISTERS	instr, svl_b, ss
305 	\instr	z0, [\ss, #0, mul vl]
306 	\instr	z1, [\ss, #1, mul vl]
307 	\instr	z2, [\ss, #2, mul vl]
308 	\instr	z3, [\ss, #3, mul vl]
309 	\instr	z4, [\ss, #4, mul vl]
310 	\instr	z5, [\ss, #5, mul vl]
311 	\instr	z6, [\ss, #6, mul vl]
312 	\instr	z7, [\ss, #7, mul vl]
313 	\instr	z8, [\ss, #8, mul vl]
314 	\instr	z9, [\ss, #9, mul vl]
315 	\instr	z10, [\ss, #10, mul vl]
316 	\instr	z11, [\ss, #11, mul vl]
317 	\instr	z12, [\ss, #12, mul vl]
318 	\instr	z13, [\ss, #13, mul vl]
319 	\instr	z14, [\ss, #14, mul vl]
320 	\instr	z15, [\ss, #15, mul vl]
321 	\instr	z16, [\ss, #16, mul vl]
322 	\instr	z17, [\ss, #17, mul vl]
323 	\instr	z18, [\ss, #18, mul vl]
324 	\instr	z19, [\ss, #19, mul vl]
325 	\instr	z20, [\ss, #20, mul vl]
326 	\instr	z21, [\ss, #21, mul vl]
327 	\instr	z22, [\ss, #22, mul vl]
328 	\instr	z23, [\ss, #23, mul vl]
329 	\instr	z24, [\ss, #24, mul vl]
330 	\instr	z25, [\ss, #25, mul vl]
331 	\instr	z26, [\ss, #26, mul vl]
332 	\instr	z27, [\ss, #27, mul vl]
333 	\instr	z28, [\ss, #28, mul vl]
334 	\instr	z29, [\ss, #29, mul vl]
335 	\instr	z30, [\ss, #30, mul vl]
336 	\instr	z31, [\ss, #31, mul vl]
337 
	/* Advance past the 32 Z slots (32 * SVL_B bytes) to the P-register area. */
338 	add		\ss, \ss, \svl_b, lsl #5
339 	\instr	p0, [\ss, #0, mul vl]
340 	\instr	p1, [\ss, #1, mul vl]
341 	\instr	p2, [\ss, #2, mul vl]
342 	\instr	p3, [\ss, #3, mul vl]
343 	\instr	p4, [\ss, #4, mul vl]
344 	\instr	p5, [\ss, #5, mul vl]
345 	\instr	p6, [\ss, #6, mul vl]
346 	\instr	p7, [\ss, #7, mul vl]
347 	\instr	p8, [\ss, #8, mul vl]
348 	\instr	p9, [\ss, #9, mul vl]
349 	\instr	p10, [\ss, #10, mul vl]
350 	\instr	p11, [\ss, #11, mul vl]
351 	\instr	p12, [\ss, #12, mul vl]
352 	\instr	p13, [\ss, #13, mul vl]
353 	\instr	p14, [\ss, #14, mul vl]
354 	\instr	p15, [\ss, #15, mul vl]
355 .endmacro
356 #endif /* HAS_ARM_FEAT_SME */
357