/*
 * Copyright (c) 2018 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <arm64/asm.h>
#include <pexpert/arm64/board_config.h>

/*
 * SAVE_CALLEE_REGISTERS
 *
 * Carve out a 160-byte (16 * 10) stack frame and spill x19-x30 plus q4-q7
 * into it.  Frame layout (offsets from the post-decrement sp):
 *   0x00: x19/x20    0x10: x21/x22    0x20: x23/x24    0x30: x25/x26
 *   0x40: x27/x28    0x50: x29/x30    0x60: q4/q5      0x80: q6/q7
 * This layout must be mirrored exactly by LOAD_CALLEE_REGISTERS below.
 *
 * NOTE(review): q4-q7 are caller-saved under AAPCS64 (only the low halves of
 * v8-v15 are callee-saved); presumably they are spilled here because the SP1
 * panic-lockdown tests round trip through exception vectors that do not
 * preserve SIMD state -- confirm against the vector code before changing.
 */
.macro SAVE_CALLEE_REGISTERS
	stp		x19, x20, [sp, #-(16 * 10)]!	/* pre-index: allocate entire frame */
	stp		x21, x22, [sp, #0x10]
	stp		x23, x24, [sp, #0x20]
	stp		x25, x26, [sp, #0x30]
	stp		x27, x28, [sp, #0x40]
	stp		x29, x30, [sp, #0x50]		/* frame pointer and link register */
	stp		q4, q5, [sp, #0x60]		/* 128-bit stores: each pair takes 0x20 */
	stp		q6, q7, [sp, #0x80]
.endmacro

/*
 * LOAD_CALLEE_REGISTERS
 *
 * Exact inverse of SAVE_CALLEE_REGISTERS: reload x19-x30 and q4-q7 from the
 * 160-byte frame and release it.  Offsets must stay in lock step with the
 * save macro.  x19/x20 are reloaded last with a post-index ldp so the same
 * instruction that restores them also pops the whole frame.
 */
.macro LOAD_CALLEE_REGISTERS
	ldp		x21, x22, [sp, #0x10]
	ldp		x23, x24, [sp, #0x20]
	ldp		x25, x26, [sp, #0x30]
	ldp		x27, x28, [sp, #0x40]
	ldp		x29, x30, [sp, #0x50]
	ldp		q4, q5, [sp, #0x60]
	ldp		q6, q7, [sp, #0x80]
	ldp		x19, x20, [sp], #(16*10)	/* post-index: deallocate entire frame */
.endmacro


/**
 * Raise a sync exception while LR is being used as a GPR.
 *
 * The caller's return address is spilled to the stack and LR is loaded with
 * an arbitrary non-address value (#0x80) before the brk fires, so the fault
 * is taken while x30 does not hold a valid return pointer.
 * arm64_brk_lr_fault labels the exact faulting instruction so the test
 * harness can match the exception PC against it.
 */
	.globl EXT(arm64_brk_lr_fault)
	.globl EXT(arm64_brk_lr_gpr)
LEXT(arm64_brk_lr_gpr)
	ARM64_PROLOG
	stp lr, xzr, [sp, #-0x10]!	/* save real return address (xzr pads to 16B) */
	mov lr, #0x80			/* clobber LR with a plain data value */
LEXT(arm64_brk_lr_fault)
	brk		0xC470		/* NOTE(review): same immediate clang uses for
					 * ptrauth failure traps -- presumably matched
					 * by the handler; confirm before changing */
	ldp lr, xzr, [sp], 0x10		/* xzr destination discards the pad slot */
	ret

#if CONFIG_SPTM
	.text
	.align 2
/**
 * Dereference the pointer in x0 and return the loaded value.
 * Used by the panic-lockdown tests: the caller passes a pointer chosen to
 * fault, so the ldr is the instruction expected to take the exception.
 */
	.globl EXT(arm64_panic_lockdown_test_load)
LEXT(arm64_panic_lockdown_test_load)
	ARM64_PROLOG
	ldr		x0, [x0]	/* faulting load under test */
	ret

/**
 * Execute the raw opcode 0xe7ffdefe.
 * NOTE(review): this bit pattern is the classic debugger breakpoint/
 * permanently-undefined encoding; in AArch64 it decodes as an unallocated
 * instruction and raises a synchronous exception -- presumably the
 * panic-lockdown path matches on it; confirm against the handler.
 */
	.globl EXT(arm64_panic_lockdown_test_gdbtrap)
LEXT(arm64_panic_lockdown_test_gdbtrap)
	ARM64_PROLOG
	.long 0xe7ffdefe	/* emitted as data: not expressible as a mnemonic */
	ret

#if __has_feature(ptrauth_calls)
/**
 * Trigger a brk with immediate 0xC470.
 * NOTE(review): 0xC470-0xC473 are the compiler-reserved ptrauth
 * authentication-failure trap immediates (0xC470 | key index) -- the
 * panic-lockdown handler presumably matches this ESR comment field.
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c470)
LEXT(arm64_panic_lockdown_test_pac_brk_c470)
	ARM64_PROLOG
	brk		0xC470
	ret			/* only reached if the exception returns here */

/**
 * Trigger a brk with immediate 0xC471 (see the 0xC470 variant above:
 * second of the four compiler-reserved ptrauth trap immediates).
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c471)
LEXT(arm64_panic_lockdown_test_pac_brk_c471)
	ARM64_PROLOG
	brk		0xC471
	ret

/**
 * Trigger a brk with immediate 0xC472 (third compiler-reserved ptrauth
 * trap immediate; see the 0xC470 variant above).
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c472)
LEXT(arm64_panic_lockdown_test_pac_brk_c472)
	ARM64_PROLOG
	brk		0xC472
	ret

/**
 * Trigger a brk with immediate 0xC473 (fourth compiler-reserved ptrauth
 * trap immediate; see the 0xC470 variant above).
 */
	.globl EXT(arm64_panic_lockdown_test_pac_brk_c473)
LEXT(arm64_panic_lockdown_test_pac_brk_c473)
	ARM64_PROLOG
	brk		0xC473
	ret

/**
 * Trigger a brk with immediate 0xFF00.
 * NOTE(review): presumably a telemetry-reserved brk comment value (per the
 * symbol name) rather than a ptrauth trap -- confirm against the handler.
 */
	.globl EXT(arm64_panic_lockdown_test_telemetry_brk_ff00)
LEXT(arm64_panic_lockdown_test_telemetry_brk_ff00)
	ARM64_PROLOG
	brk		0xFF00
	ret

/**
 * Indirect branch with pointer authentication (key IA, zero modifier).
 * The caller passes a pointer in x0 whose PAC will not verify, so the
 * braaz either faults or branches to a poisoned address, driving the
 * panic-lockdown path.
 */
	.globl EXT(arm64_panic_lockdown_test_br_auth_fail)
LEXT(arm64_panic_lockdown_test_br_auth_fail)
	ARM64_PROLOG
	braaz	x0		/* authenticate-and-branch; expected to fail */
	ret			/* unreachable on the failure path */

/**
 * Authenticated load (key DA, zero offset form).
 * The caller passes a data pointer in x0 whose PAC will not verify, so the
 * ldraa authenticates x0, fails, and the resulting access faults.
 */
	.globl EXT(arm64_panic_lockdown_test_ldr_auth_fail)
LEXT(arm64_panic_lockdown_test_ldr_auth_fail)
	ARM64_PROLOG
	ldraa	x0, [x0]	/* authenticate x0 with key DA, then load */
	ret
#endif /* ptrauth_calls  */

#if __ARM_ARCH_8_6__
/**
 * Authenticate x0 with key IA, zero modifier.
 * On ARMv8.6 (FEAT_FPAC) a failed autiza raises a PAC-fail exception
 * directly instead of merely poisoning the pointer, which is the behavior
 * this test exercises.
 */
	.globl EXT(arm64_panic_lockdown_test_fpac)
LEXT(arm64_panic_lockdown_test_fpac)
	ARM64_PROLOG
	autiza	x0		/* expected to fault under FPAC */
	ret
#endif /* __ARM_ARCH_8_6__ */

/*
 * SP1 Panic Lockdown Tests
 *
 * These tests are somewhat complex because we're round tripping through an
 * exception vector which is not intended to return. This means we'll lose a
 * fair amount of state. The only thing we can really rely on being preserved is
 * SP_EL0 as we stay on SP1 for the entire vector. As such, we need to save all
 * callee saved registers here.
 */

/**
 * arm64_panic_lockdown_test_sp1_invalid_stack
 *
 * This test simulates a stack overflow/corruption.
 *
 * Sequence: save callee state on SP0, stash the real SP_EL1 value on the
 * SP0 stack, zero SP_EL1, then (back on SP1) branch into a brk test so the
 * exception is taken on the now-invalid SP1 stack -- but outside the
 * vector's critical region.  Control resumes (if the test passes) in
 * arm64_panic_lockdown_test_sp1_invalid_stack_handler below, which undoes
 * all of this; the stack layout here must stay in sync with that handler.
 * Does not return through the fall-through path.
 */
	.globl EXT(arm64_panic_lockdown_test_sp1_invalid_stack)
LEXT(arm64_panic_lockdown_test_sp1_invalid_stack)
	ARM64_STACK_PROLOG
	SAVE_CALLEE_REGISTERS
	/* Spill the real SP1 to the stack and trash the old one */
	msr		SPSel, #1	/* sp now aliases SP_EL1 */
	mov		x0, sp		/* x0 = real SP_EL1 */
	mov		x1, #0
	mov		sp, x1		/* SP_EL1 = 0 (invalid) */
	msr		SPSel, #0	/* back to SP_EL0 */
	str		x0, [sp, #-16]!	/* stash real SP1 on the SP0 stack */
	/* Take an exception on SP1 but outside the critical region */
	msr		SPSel, #1
	b		EXT(arm64_panic_lockdown_test_pac_brk_c470)

/**
 * arm64_panic_lockdown_test_sp1_invalid_stack_handler
 *
 * Entered (from the exception path) only if the SP1 invalid-stack test
 * passed.  Runs on SP1; SP_EL0 still points at the slot where the test
 * stashed the real SP_EL1 value.  Restores SP_EL1 and SP_EL0, reloads the
 * callee-saved registers spilled by the test, and returns 1 to the test's
 * original caller.
 */
	.global EXT(arm64_panic_lockdown_test_sp1_invalid_stack_handler)
LEXT(arm64_panic_lockdown_test_sp1_invalid_stack_handler)
	ARM64_PROLOG
	/* If we made it here, the test passed. Fix the system up. */
	mrs		x0, SP_EL0	/* x0 = SP0 as the test left it */
	ldr		x1, [x0], #16	/* pop the stashed real SP_EL1 */
	/* Restore the real SP1 */
	mov		sp, x1		/* still on SP1 here */
	/* Update SP0 to prepare to return */
	msr		SPSel, #0
	mov		sp, x0		/* SP0 now points at the callee-saved frame */
	/* Return 1 to indicate success */
	mov		x0, #1
	LOAD_CALLEE_REGISTERS
	ARM64_STACK_EPILOG EXT(arm64_panic_lockdown_test_sp1_invalid_stack)

/**
 * arm64_panic_lockdown_test_sp1_exception_in_vector
 * This test simulates an exception in the SP1 critical region.
 *
 * Saves callee state on SP0, switches to SP1, and branches to a helper
 * (defined elsewhere) that raises a synchronous exception from inside the
 * vector's critical region.  On success, control resumes in the handler
 * below; this function does not return through the fall-through path.
 */
	.globl EXT(arm64_panic_lockdown_test_sp1_exception_in_vector)
LEXT(arm64_panic_lockdown_test_sp1_exception_in_vector)
	ARM64_STACK_PROLOG
	SAVE_CALLEE_REGISTERS
	/* Trigger an exception inside the vector on SP1 */
	msr		SPSel, #1
	b		EXT(el1_sp1_synchronous_raise_exception_in_vector)

/**
 * arm64_panic_lockdown_test_sp1_exception_in_vector_handler
 *
 * Entered only if the exception-in-vector test passed.  Unlike the
 * invalid-stack case, SP_EL1 was not trashed, so recovery is just:
 * switch back to SP0 (still holding the callee-saved frame), reload the
 * spilled registers, and return 1 to the test's original caller.
 */
	.globl EXT(arm64_panic_lockdown_test_sp1_exception_in_vector_handler)
LEXT(arm64_panic_lockdown_test_sp1_exception_in_vector_handler)
	ARM64_PROLOG
	/* Return to SP0 */
	msr		SPSel, #0
	/* Return 1 to indicate success */
	mov		x0, #1
	LOAD_CALLEE_REGISTERS
	ARM64_STACK_EPILOG EXT(arm64_panic_lockdown_test_sp1_exception_in_vector)

#endif /* CONFIG_SPTM */

#if BTI_ENFORCED
	.text
	.align 2
/**
 * Tail-jump to the function pointer in x0 via an indirect *branch* (not a
 * call), so the target must begin with a jump-compatible BTI landing pad
 * (bti j / bti jc) to avoid a BTI exception.  With ptrauth, the pointer is
 * authenticated with key IA, zero modifier, on the way through.
 * Never returns here; the target's ret goes back to our caller.
 */
	.global EXT(arm64_bti_test_jump_shim)
LEXT(arm64_bti_test_jump_shim)
	ARM64_PROLOG
#if __has_feature(ptrauth_calls)
	braaz	x0
#else
	br		x0
#endif /* __has_feature(ptrauth_calls) */

/**
 * Call the function pointer in x0 via an indirect *call* (blr), so the
 * target must begin with a call-compatible BTI landing pad (bti c /
 * bti jc).  With ptrauth, the pointer is authenticated with key IA, zero
 * modifier.  Returns whatever the target returns (x0 passed through).
 */
	.global EXT(arm64_bti_test_call_shim)
LEXT(arm64_bti_test_call_shim)
	ARM64_STACK_PROLOG
	PUSH_FRAME		/* non-leaf: preserve fp/lr across the call */
#if __has_feature(ptrauth_calls)
	blraaz	x0
#else
	blr		x0
#endif /* __has_feature(ptrauth_calls) */
	POP_FRAME
	ARM64_STACK_EPILOG EXT(arm64_bti_test_call_shim)

/**
 * BTI fixture: deliberately begins with a plain instruction rather than a
 * BTI landing pad, so any indirect branch/call into it must fault under
 * BTI enforcement.  Returns 1 so direct-call paths can identify it.
 */
	.globl EXT(arm64_bti_test_func_with_no_landing_pad)
LEXT(arm64_bti_test_func_with_no_landing_pad)
	mov		w0, #1		/* writing w0 zero-extends into x0 */
	ret
/**
 * BTI fixture: begins with a call-only landing pad (bti c), so indirect
 * calls (blr) land safely while indirect jumps (br) must fault.
 * Returns 2 to identify this fixture.
 */
	.globl EXT(arm64_bti_test_func_with_call_landing_pad)
LEXT(arm64_bti_test_func_with_call_landing_pad)
	bti		c		/* valid target for blr only */
	mov		w0, #2		/* writing w0 zero-extends into x0 */
	ret
/**
 * BTI fixture: begins with a jump-only landing pad (bti j), so indirect
 * jumps (br) land safely while indirect calls (blr) must fault.
 * Returns 3 to identify this fixture.
 */
	.globl EXT(arm64_bti_test_func_with_jump_landing_pad)
LEXT(arm64_bti_test_func_with_jump_landing_pad)
	bti		j		/* valid target for br only */
	mov		w0, #3		/* writing w0 zero-extends into x0 */
	ret
/**
 * BTI fixture: begins with the combined landing pad (bti jc), accepting
 * both indirect jumps (br) and indirect calls (blr).
 * Returns 4 to identify this fixture.
 */
	.globl EXT(arm64_bti_test_func_with_jump_call_landing_pad)
LEXT(arm64_bti_test_func_with_jump_call_landing_pad)
	bti		jc		/* valid target for both br and blr */
	mov		w0, #4		/* writing w0 zero-extends into x0 */
	ret
#if __has_feature(ptrauth_returns)
/**
 * BTI fixture: begins with pacibsp, which is an implicit BTI landing pad
 * (valid for both br and blr targets) in addition to signing the return
 * address; the matching retab authenticates it on return.
 * Returns 5 to identify this fixture.
 */
	.globl EXT(arm64_bti_test_func_with_pac_landing_pad)
LEXT(arm64_bti_test_func_with_pac_landing_pad)
	pacibsp			/* sign LR with key IB; doubles as landing pad */
	mov		x0, #5
	retab			/* authenticate LR before returning */
#endif /* __has_feature(ptrauth_returns) */
#endif /* BTI_ENFORCED */


