/* xref: /xnu-8020.140.41/osfmk/arm64/machine_routines_asm.h (revision 27b03b360a988dfd3dfdf34262bb0042026747cc) */
/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
28 
#include <arm64/proc_reg.h>
#include <pexpert/arm64/board_config.h>
#include "assym.s"

/* This header emits assembler directives and instructions (and pulls in
 * generated offsets from assym.s), so it is meaningless to a C compile:
 * refuse inclusion unless we are preprocessing an assembly (.s) file. */
#ifndef __ASSEMBLER__
#error "This header should only be used in .s files"
#endif
/**
 * AUTH_THREAD_STATE_IN_X0
 *
 * Loads the following values from the thread_kernel_state pointer in x0:
 *
 * x1: $x0->ss_64.pc
 * w2: $x0->ss_64.cpsr
 * x16: $x0->ss_64.x16
 * x17: $x0->ss_64.x17
 * lr: $x0->ss_64.lr
 *
 * On CPUs with PAC support, this macro will auth the above values with
 * ml_check_signed_state().
 *
 * tmp1 - scratch register 1 (holds PC across the ml_check_signed_state call)
 * tmp2 - scratch register 2 (holds CPSR across the ml_check_signed_state call)
 * tmp3 - scratch register 3 (preserves x3 across the call)
 * tmp4 - scratch register 4 (preserves x4 across the call)
 * tmp5 - scratch register 5 (preserves x5 across the call)
 * el0_state_allowed - when 0 (the default), on HAS_APPLE_PAC builds, a CPSR
 *     whose mode field (PSR64_MODE_MASK) is zero is treated as invalid and
 *     reported via ml_auth_thread_state_invalid_cpsr()
 *
 * Clobbers x1, x2, x16, x17, lr, and tmp1-tmp5; on ptrauth builds it also
 * switches to SP1 on entry and leaves SPSel = 0 on exit.
 */
/* BEGIN IGNORE CODESTYLE */
.macro AUTH_THREAD_STATE_IN_X0 tmp1, tmp2, tmp3, tmp4, tmp5, el0_state_allowed=0
#if __has_feature(ptrauth_calls)
	// Select SP1 while operating on (potentially unauthed) saved state;
	// SPSel is restored to 0 after ml_check_signed_state returns below.
	// NOTE(review): confirm the exact stack-selection rationale with callers.
	msr		SPSel, #1
#endif
	ldr		w2, [x0, SS64_CPSR]
.if \el0_state_allowed==0
#if __has_feature(ptrauth_calls)
	// If testing for a canary CPSR value, ensure that we do not observe writes to other fields without it
	dmb		ld
#endif
.endif
	ldr		x1, [x0, SS64_PC]
	ldp		x16, x17, [x0, SS64_X16]

#if defined(HAS_APPLE_PAC)
	// Save x3-x5 to preserve across call
	mov		\tmp3, x3
	mov		\tmp4, x4
	mov		\tmp5, x5

	/*
	* Arg0: The ARM context pointer (already in x0)
	* Arg1: PC to check (loaded above)
	* Arg2: CPSR to check (loaded above)
	* Arg3: the LR to check
	*
	* Stash saved state PC and CPSR in other registers to avoid reloading potentially unauthed
	* values from memory.  (ml_check_signed_state will clobber x1 and x2.)
	*/
	mov		\tmp1, x1
	mov		\tmp2, x2
	ldr		x3, [x0, SS64_LR]
	mov		x4, x16
	mov		x5, x17
	bl		EXT(ml_check_signed_state)
	mov		x1, \tmp1
	mov		x2, \tmp2
	msr		SPSel, #0

.if \el0_state_allowed==0
	// A zero mode field in the authed CPSR is not acceptable here; report it.
	// (cbnz skips the call whenever any PSR64_MODE_MASK bit is set.)
	and		\tmp2, \tmp2, #PSR64_MODE_MASK
	cbnz		\tmp2, 1f
	bl		EXT(ml_auth_thread_state_invalid_cpsr)
1:
.endif

	// LR was already loaded/authed earlier, if we reload it we might be loading a potentially unauthed value
	mov		lr, x3
	mov		x3, \tmp3
	mov		x4, \tmp4
	mov		x5, \tmp5
#else
	ldr		lr, [x0, SS64_LR]
#endif /* defined(HAS_APPLE_PAC) */
.endmacro
/* END IGNORE CODESTYLE */
111 
112 /* vim: set ft=asm: */
113