xref: /xnu-11417.140.69/osfmk/arm64/platform_tests_asm.s (revision 43a90889846e00bfb5cf1d255cdc0a701a1e05a4)
1/*
2 * Copyright (c) 2018 Apple Inc. All rights reserved.
3 *
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
14 *
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
25 *
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27 */
28
29#include <arm64/asm.h>
30#include <pexpert/arm64/board_config.h>
31
32
/**
 * arm64_brk_lr_gpr / arm64_brk_lr_fault
 *
 * Raise a synchronous exception (brk) while LR is being used as a
 * general-purpose register instead of holding a return address.
 *
 * LR is spilled to the stack and overwritten with a plain data value
 * before the brk, which is taken at the arm64_brk_lr_fault label so the
 * test harness can match the faulting PC.  If the handler resumes
 * execution past the brk, the real LR is restored and we return.
 */
	.globl EXT(arm64_brk_lr_fault)
	.globl EXT(arm64_brk_lr_gpr)
LEXT(arm64_brk_lr_gpr)
	ARM64_PROLOG
	stp lr, xzr, [sp, #-0x10]!	/* spill LR; xzr just pads the 16-byte slot */
	mov lr, #0x80			/* LR now holds arbitrary non-address data */
LEXT(arm64_brk_lr_fault)
	brk		0xC470		/* sync exception while LR is live as a GPR */
	ldp lr, xzr, [sp], 0x10		/* restore LR (the load into xzr is discarded) */
	ret
46
47#if CONFIG_SPTM
	.text
	.align 2
/**
 * arm64_panic_lockdown_test_load
 *
 * x0 = pointer to load from.  Dereferences x0 so the panic-lockdown
 * tests can trigger a data abort from a known PC; if the access
 * succeeds, the loaded value is returned in x0.
 */
	.globl EXT(arm64_panic_lockdown_test_load)
LEXT(arm64_panic_lockdown_test_load)
	ARM64_PROLOG
	ldr		x0, [x0]
	ret
55
/**
 * arm64_panic_lockdown_test_gdbtrap
 *
 * Execute 0xe7ffdefe — an undefined encoding historically used by
 * debuggers as a software trap — to raise an undefined-instruction
 * synchronous exception from a known PC.
 */
	.globl EXT(arm64_panic_lockdown_test_gdbtrap)
LEXT(arm64_panic_lockdown_test_gdbtrap)
	ARM64_PROLOG
	.long 0xe7ffdefe	/* emitted as data: not a valid instruction */
	ret			/* reached only if the handler steps past the trap */
61
62#if __has_feature(ptrauth_calls)
/*
 * Take brk #0xC470 — the immediate compilers emit for a pointer
 * authentication failure on the IA key — to exercise panic lockdown's
 * handling of PAC-failure traps.
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c470)
LEXT(arm64_panic_lockdown_test_pac_brk_c470)
	ARM64_PROLOG
	brk		0xC470
	ret		/* reached only if the handler resumes past the brk */
68
/*
 * Take brk #0xC471 — the compiler-emitted trap for a pointer
 * authentication failure on the IB key.
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c471)
LEXT(arm64_panic_lockdown_test_pac_brk_c471)
	ARM64_PROLOG
	brk		0xC471
	ret		/* reached only if the handler resumes past the brk */
74
/*
 * Take brk #0xC472 — the compiler-emitted trap for a pointer
 * authentication failure on the DA key.
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c472)
LEXT(arm64_panic_lockdown_test_pac_brk_c472)
	ARM64_PROLOG
	brk		0xC472
	ret		/* reached only if the handler resumes past the brk */
80
/*
 * Take brk #0xC473 — the compiler-emitted trap for a pointer
 * authentication failure on the DB key.
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c473)
LEXT(arm64_panic_lockdown_test_pac_brk_c473)
	ARM64_PROLOG
	brk		0xC473
	ret		/* reached only if the handler resumes past the brk */
86
/*
 * Take brk #0xFF00 — per the function name, an immediate matched by
 * the kernel's brk-telemetry handling — under panic lockdown.
 */
	.globl EXT(arm64_panic_lockdown_test_telemetry_brk_ff00)
LEXT(arm64_panic_lockdown_test_telemetry_brk_ff00)
	ARM64_PROLOG
	brk		0xFF00
	ret		/* reached only if the handler resumes past the brk */
92
/*
 * arm64_panic_lockdown_test_br_auth_fail
 *
 * braaz: indirect branch to x0, authenticating it with the IA key and
 * a zero modifier.  Intended (per the name) to be called with a
 * pointer whose PAC will not authenticate, so the branch faults.
 */
	.globl EXT(arm64_panic_lockdown_test_br_auth_fail)
LEXT(arm64_panic_lockdown_test_br_auth_fail)
	ARM64_PROLOG
	braaz	x0
	ret		/* not normally reached */
98
/*
 * arm64_panic_lockdown_test_ldr_auth_fail
 *
 * ldraa: load from x0, first authenticating the pointer with the DA
 * key and a zero modifier.  Intended (per the name) to be called with
 * a pointer whose PAC will not authenticate, so the access faults.
 */
	.globl EXT(arm64_panic_lockdown_test_ldr_auth_fail)
LEXT(arm64_panic_lockdown_test_ldr_auth_fail)
	ARM64_PROLOG
	ldraa	x0, [x0]
	ret
104#endif /* ptrauth_calls  */
105
106#if __ARM_ARCH_8_6__
/*
 * arm64_panic_lockdown_test_fpac
 *
 * autiza: authenticate x0 with the IA key and a zero modifier.  With
 * FEAT_FPAC (ARMv8.6, per the enclosing guard), a failed
 * authentication raises an exception directly instead of merely
 * poisoning the pointer — which is what this test relies on.
 */
	.globl EXT(arm64_panic_lockdown_test_fpac)
LEXT(arm64_panic_lockdown_test_fpac)
	ARM64_PROLOG
	autiza	x0
	ret
112#endif /* __ARM_ARCH_8_6__ */
113
#if BTI_ENFORCED && CONFIG_BTI_TELEMETRY
/*
 * arm64_panic_lockdown_test_bti_telemetry
 *
 * Indirectly branch to the nop at 0b.  The nop is not a BTI landing
 * pad (it is the first instruction *after* the function's landing
 * pad), so the br raises a BTI exception.  The function does not
 * return normally — there is deliberately no ret.
 */
	.globl EXT(arm64_panic_lockdown_test_bti_telemetry)
LEXT(arm64_panic_lockdown_test_bti_telemetry)
	ARM64_PROLOG
	/*
	 * Trigger a BTI exception on the first instruction *after* the landing pad.
	 */
0:
	nop			/* branch target; not a landing pad */
	adr		x0, 0b	/* x0 = address of the nop above */
	br		x0	/* indirect jump -> BTI fault at 0b */
#endif /* BTI_ENFORCED && CONFIG_BTI_TELEMETRY */
126
127/*
128 * SP1 Panic Lockdown Tests
129 *
130 * These tests are somewhat complex because we're round tripping through an
131 * exception vector which is not intended to return. This means we'll lose a
132 * fair amount of state. The only thing we can really rely on being preserved is
133 * SP_EL0 as we stay on SP1 for the entire vector. As such, we need to save all
134 * callee saved registers here.
135 */
136
/*
 * Save/restore every register the AAPCS64 requires a callee to
 * preserve: x19-x28, fp (x29), lr (x30), and the low 64 bits of
 * v8-v15 (i.e. d8-d15).  The frame is 16 * 10 = 160 bytes and the
 * stack stays 16-byte aligned throughout.
 *
 * NOTE(review): this previously saved q4-q7, which are caller-saved
 * under the AAPCS64, while the actually callee-saved SIMD state
 * (d8-d15) was not preserved at all — contradicting the block comment
 * above.  Changed to d8-d15; the frame size is unchanged.
 */
.macro SAVE_CALLEE_REGISTERS
	stp		x19, x20, [sp, #-(16 * 10)]!	/* allocate frame, pre-index */
	stp		x21, x22, [sp, #0x10]
	stp		x23, x24, [sp, #0x20]
	stp		x25, x26, [sp, #0x30]
	stp		x27, x28, [sp, #0x40]
	stp		x29, x30, [sp, #0x50]
	stp		d8, d9, [sp, #0x60]
	stp		d10, d11, [sp, #0x70]
	stp		d12, d13, [sp, #0x80]
	stp		d14, d15, [sp, #0x90]
.endmacro

.macro LOAD_CALLEE_REGISTERS
	ldp		x21, x22, [sp, #0x10]
	ldp		x23, x24, [sp, #0x20]
	ldp		x25, x26, [sp, #0x30]
	ldp		x27, x28, [sp, #0x40]
	ldp		x29, x30, [sp, #0x50]
	ldp		d8, d9, [sp, #0x60]
	ldp		d10, d11, [sp, #0x70]
	ldp		d12, d13, [sp, #0x80]
	ldp		d14, d15, [sp, #0x90]
	ldp		x19, x20, [sp], #(16 * 10)	/* free frame, post-index */
.endmacro
158
/**
 * arm64_panic_lockdown_test_sp1_invalid_stack
 *
 * This test simulates a stack overflow/corruption: the real SP1 value
 * is stashed on the SP0 stack, SP1 is set to 0 (an invalid stack), and
 * we branch — on SP1, but outside the vector's critical region — to a
 * routine that takes a brk.  Control never returns here; on success
 * the exception path lands in the _handler variant below, which undoes
 * the state saved here and returns to our caller.
 */
	.globl EXT(arm64_panic_lockdown_test_sp1_invalid_stack)
LEXT(arm64_panic_lockdown_test_sp1_invalid_stack)
	ARM64_STACK_PROLOG
	SAVE_CALLEE_REGISTERS
	/* Spill the real SP1 to the stack and trash the old one */
	msr		SPSel, #1		/* sp now selects SP_EL1 */
	mov		x0, sp			/* x0 = real SP1 */
	mov		x1, #0
	mov		sp, x1			/* SP1 = 0: invalid stack */
	msr		SPSel, #0		/* back to SP_EL0 */
	str		x0, [sp, #-16]!		/* push saved SP1; keeps 16-byte alignment */
	/* Take an exception on SP1 but outside the critical region */
	msr		SPSel, #1
	b		EXT(arm64_panic_lockdown_test_pac_brk_c470)
178
/*
 * arm64_panic_lockdown_test_sp1_invalid_stack_handler
 *
 * Entered from the exception vector if the invalid-SP1-stack test
 * passed.  SP_EL0 still points at the frame the test entry point
 * built: [saved SP1][callee-saved frame].  Restores SP1 and SP0, then
 * returns 1 to the original caller.
 */
	.global EXT(arm64_panic_lockdown_test_sp1_invalid_stack_handler)
LEXT(arm64_panic_lockdown_test_sp1_invalid_stack_handler)
	ARM64_PROLOG
	/* If we made it here, the test passed. Fix the system up. */
	mrs		x0, SP_EL0
	ldr		x1, [x0], #16	/* pop saved SP1; x0 -> callee-saved frame */
	/* Restore the real SP1 (sp selects SP_EL1 here — presumably we
	 * were entered with SPSel = 1; confirm against the vector) */
	mov		sp, x1
	/* Update SP0 to prepare to return */
	msr		SPSel, #0
	mov		sp, x0
	/* Return 1 to indicate success */
	mov		x0, #1
	LOAD_CALLEE_REGISTERS
	ARM64_STACK_EPILOG
194
/**
 * arm64_panic_lockdown_test_sp1_exception_in_vector
 *
 * This test simulates an exception in the SP1 critical region: after
 * saving callee state on SP0, it switches to SP1 and branches into a
 * vector routine that raises an exception.  Control never returns
 * here; on success the exception path lands in the _handler variant
 * below, which unwinds the frame saved here.
 */
	.globl EXT(arm64_panic_lockdown_test_sp1_exception_in_vector)
LEXT(arm64_panic_lockdown_test_sp1_exception_in_vector)
	ARM64_STACK_PROLOG
	SAVE_CALLEE_REGISTERS
	/* Trigger an exception inside the vector on SP1 */
	msr		SPSel, #1
	b		EXT(el1_sp1_synchronous_raise_exception_in_vector)
206
/*
 * arm64_panic_lockdown_test_sp1_exception_in_vector_handler
 *
 * Entered from the exception vector if the exception-in-vector test
 * passed.  SP0 still holds the callee-saved frame pushed by the test
 * entry point; switch back to it and return 1 to the original caller.
 */
	.globl EXT(arm64_panic_lockdown_test_sp1_exception_in_vector_handler)
LEXT(arm64_panic_lockdown_test_sp1_exception_in_vector_handler)
	ARM64_PROLOG
	/* Return to SP0 */
	msr		SPSel, #0
	/* Return 1 to indicate success */
	mov		x0, #1
	LOAD_CALLEE_REGISTERS
	ARM64_STACK_EPILOG
216
217#endif /* CONFIG_SPTM */
218
219#if BTI_ENFORCED
	.text
	.align 2
/**
 * arm64_bti_test_jump_shim
 *
 * Tail-jump to the function pointer in x0; the target's ret returns
 * directly to our caller, so no ret is needed here.  With ptrauth,
 * the pointer is authenticated (IA key, zero modifier) by the branch
 * itself.
 */
	.global EXT(arm64_bti_test_jump_shim)
LEXT(arm64_bti_test_jump_shim)
	ARM64_PROLOG
#if __has_feature(ptrauth_calls)
	braaz	x0
#else
	br		x0
#endif /* __has_feature(ptrauth_calls) */
230
/**
 * arm64_bti_test_call_shim
 *
 * Call the function pointer in x0 and return its result (x0 is left
 * as the callee set it).  With ptrauth, the pointer is authenticated
 * (IA key, zero modifier) by the branch-with-link itself.
 */
	.global EXT(arm64_bti_test_call_shim)
LEXT(arm64_bti_test_call_shim)
	ARM64_STACK_PROLOG
	PUSH_FRAME
#if __has_feature(ptrauth_calls)
	blraaz	x0
#else
	blr		x0
#endif /* __has_feature(ptrauth_calls) */
	POP_FRAME
	ARM64_STACK_EPILOG
242
/*
 * Returns 1.  Deliberately starts with no BTI landing pad (and no
 * prologue macro), so an indirect branch here should fault under BTI.
 */
	.globl EXT(arm64_bti_test_func_with_no_landing_pad)
LEXT(arm64_bti_test_func_with_no_landing_pad)
	mov		x0, #1
	ret
247
/*
 * Returns 2.  Starts with "bti c": a landing pad valid for call-type
 * indirect branches (blr) but not plain jumps.
 */
	.globl EXT(arm64_bti_test_func_with_call_landing_pad)
LEXT(arm64_bti_test_func_with_call_landing_pad)
	bti		c
	mov		x0, #2
	ret
253
/*
 * Returns 3.  Starts with "bti j": a landing pad valid for jump-type
 * indirect branches (br) but not calls.
 */
	.globl EXT(arm64_bti_test_func_with_jump_landing_pad)
LEXT(arm64_bti_test_func_with_jump_landing_pad)
	bti		j
	mov		x0, #3
	ret
259
/*
 * Returns 4.  Starts with "bti jc": a landing pad valid for both
 * call-type and jump-type indirect branches.
 */
	.globl EXT(arm64_bti_test_func_with_jump_call_landing_pad)
LEXT(arm64_bti_test_func_with_jump_call_landing_pad)
	bti		jc
	mov		x0, #4
	ret
265
#if __has_feature(ptrauth_returns)
/*
 * Returns 5.  pacibsp signs LR and also acts as an implicit BTI
 * landing pad for call-type branches (unless SCTLR's BT bits are
 * configured to forbid that); retab authenticates LR on return.
 */
	.globl EXT(arm64_bti_test_func_with_pac_landing_pad)
LEXT(arm64_bti_test_func_with_pac_landing_pad)
	pacibsp
	mov		x0, #5
	retab
#endif /* __has_feature(ptrauth_returns) */
273#endif /* BTI_ENFORCED */
274