/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <arm64/proc_reg.h>
#include <pexpert/arm64/board_config.h>
#include "assym.s"

#ifndef __ASSEMBLER__
#error "This header should only be used in .s files"
#endif

/**
 * Loads the following values from the thread_kernel_state pointer in x0:
 *
 * x1:  $x0->ss_64.pc
 * w2:  $x0->ss_64.cpsr
 * x16: $x0->ss_64.x16
 * x17: $x0->ss_64.x17
 * lr:  $x0->ss_64.lr
 *
 * On CPUs with PAC support, this macro will auth the above values with
 * ml_check_signed_state().
 *
 * tmp1 - scratch register 1
 * tmp2 - scratch register 2
 * tmp3 - scratch register 3
 * tmp4 - scratch register 4
 * tmp5 - scratch register 5
 * tmp6 - scratch register 6
 */
/* BEGIN IGNORE CODESTYLE */
.macro AUTH_THREAD_STATE_IN_X0	tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, el0_state_allowed=0
#if __has_feature(ptrauth_calls)
	msr		SPSel, #1
#endif
	ldr		w2, [x0, SS64_CPSR]
.if \el0_state_allowed==0
#if __has_feature(ptrauth_calls)
	// If testing for a canary CPSR value, ensure that we do not observe writes
	// to other fields without it
	dmb		ld
#endif
.endif
	ldr		x1, [x0, SS64_PC]
	ldp		x16, x17, [x0, SS64_X16]

#if defined(HAS_APPLE_PAC)
	// Save x3-x6 to preserve across call
	mov		\tmp3, x3
	mov		\tmp4, x4
	mov		\tmp5, x5
	mov		\tmp6, x6

	/*
	 * Arg0: the ARM context pointer (already in x0)
	 * Arg1: the PC to check (loaded above)
	 * Arg2: the CPSR to check (loaded above)
	 * Arg3: the LR to check
	 *
	 * Stash the saved state PC and CPSR in other registers to avoid reloading
	 * potentially unauthed values from memory. (ml_check_signed_state will
	 * clobber x1, x2, and x16.)
	 */
	mov		\tmp1, x1
	mov		\tmp2, x2
	ldr		x3, [x0, SS64_LR]
	mov		x4, x16
	mov		x5, x17
	bl		EXT(ml_check_signed_state)
	mov		x1, \tmp1
	mov		x2, \tmp2
	mov		x16, x4
	msr		SPSel, #0

.if \el0_state_allowed==0
	and		\tmp2, \tmp2, #PSR64_MODE_MASK
	cbnz		\tmp2, 1f
	bl		EXT(ml_auth_thread_state_invalid_cpsr)
1:
.endif

	// LR was already loaded/authed earlier; reloading it here could pick up a
	// potentially unauthed value
	mov		lr, x3
	mov		x3, \tmp3
	mov		x4, \tmp4
	mov		x5, \tmp5
	mov		x6, \tmp6
#else
	ldr		lr, [x0, SS64_LR]
#endif /* defined(HAS_APPLE_PAC) */
.endmacro
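
/*
 * Illustrative usage (a sketch, not a call site from this file): a restore
 * path with the thread_kernel_state pointer already in x0 could expand the
 * macro as below. The choice of x19-x24 as scratch registers is an assumption
 * made for the example; any six registers that are dead across the macro work.
 *
 *	AUTH_THREAD_STATE_IN_X0	x19, x20, x21, x22, x23, x24, el0_state_allowed=0
 *	// On fall-through: x1 = authed pc, w2 = authed cpsr, lr = authed lr,
 *	// and x16/x17 hold the authed saved x16/x17.
 */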

#if !__ARM_ARCH_8_6__
.set BRK_AUTDA_FAILURE, 0xc472
#endif

/**
 * Performs the appropriate SoC-specific routine for a blended AUTDA operation.
 * On success, falls through with the stripped result in \value. Faults
 * otherwise.
 *
 * value (inout): The register holding the PAC'd pointer to authenticate.
 *	The stripped result is returned in this register.
 * address (input, clobbered): The register holding the address from which
 *	\value was loaded. This forms part of the diversification.
 * diversifier (input): The diversifier constant to blend with \address.
 */
.macro AUTDA_DIVERSIFIED	value, address, diversifier
#if __has_feature(ptrauth_calls)
	/* Blend */
	movk	\address, \diversifier, lsl #48
	autda	\value, \address
#if !__ARM_ARCH_8_6__
	mov		\address, \value
	xpacd	\address
	cmp		\address, \value
	b.eq	Lautda_ok_\@
	brk		#BRK_AUTDA_FAILURE
Lautda_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro

/**
 * Loads and auths the pointer to the top of a thread's kernel stack.
 *
 * Faults on auth failure. src and dst can be the same register, as long as the
 * caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->machine.kstackptr)
 * tmp: clobbered
 */
.macro LOAD_KERN_STACK_TOP	dst, src, tmp
	add		\tmp, \src, TH_KSTACKPTR
	ldr		\dst, [\tmp]
	AUTDA_DIVERSIFIED	\dst, address=\tmp, diversifier=TH_KSTACKPTR_DIVERSIFIER
.endmacro

/**
 * Loads and auths a thread's user context data.
 *
 * Faults on auth failure. src and dst can be the same register, as long as the
 * caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->machine.upcb)
 * tmp: clobbered
 */
.macro LOAD_USER_PCB	dst, src, tmp
	add		\tmp, \src, TH_UPCB
	ldr		\dst, [\tmp]
	AUTDA_DIVERSIFIED	\dst, address=\tmp, diversifier=TH_UPCB_DIVERSIFIER
.endmacro
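
/*
 * Illustrative usage (a sketch under assumed register choices, not a call
 * site from this file): switching onto a thread's kernel stack when x3 holds
 * the struct thread pointer. On auth failure, AUTDA_DIVERSIFIED faults
 * before \dst is ever consumed.
 *
 *	LOAD_KERN_STACK_TOP	dst=x1, src=x3, tmp=x2
 *	mov	sp, x1		// now on the authed kernel stack
 */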

/**
 * Loads and auths a thread's interrupt stack pointer.
 *
 * Faults on auth failure. src and dst can be the same register, as long as the
 * caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->cpuDataP.istackptr)
 * tmp: clobbered
 */
.macro LOAD_INT_STACK_THREAD	dst, src, tmp
	ldr		\tmp, [\src, #ACT_CPUDATAP]
	LOAD_INT_STACK_CPU_DATA	\dst, src=\tmp, tmp=\tmp
.endmacro

/**
 * Loads and auths a CPU's interrupt stack pointer.
 *
 * Faults on auth failure.
 *
 * src (input): cpu_data_t *
 * dst (output): ptrauth_auth(cpuDataP.istackptr)
 * tmp (clobber): Temporary register. Can be the same as \src if callers don't
 *	care to preserve it.
 */
.macro LOAD_INT_STACK_CPU_DATA	dst, src, tmp
	add		\tmp, \src, #CPU_ISTACKPTR
	ldr		\dst, [\tmp]
	AUTDA_DIVERSIFIED	\dst, address=\tmp, diversifier=CPU_ISTACKPTR_DIVERSIFIER
.endmacro

/**
 * Loads and auths a thread's exception stack pointer.
 *
 * Faults on auth failure. src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input): struct thread *
 * dst (output): ptrauth_auth(src->cpuDataP.excepstackptr)
 * tmp: clobbered
 */
.macro LOAD_EXCEP_STACK_THREAD	dst, src, tmp
	ldr		\tmp, [\src, #ACT_CPUDATAP]
	LOAD_EXCEP_STACK_CPU_DATA	\dst, src=\tmp, tmp=\tmp
.endmacro

/**
 * Loads and auths a CPU's exception stack pointer.
 *
 * Faults on auth failure.
 *
 * src (input): cpu_data_t *
 * dst (output): ptrauth_auth(cpuDataP.excepstackptr)
 * tmp (clobber): Temporary register. Can be the same as \src if callers don't
 *	care to preserve it.
 */
.macro LOAD_EXCEP_STACK_CPU_DATA	dst, src, tmp
	add		\tmp, \src, #CPU_EXCEPSTACKPTR
	ldr		\dst, [\tmp]
	AUTDA_DIVERSIFIED	\dst, address=\tmp, diversifier=CPU_EXCEPSTACKPTR_DIVERSIFIER
.endmacro
/* END IGNORE CODESTYLE */
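
/*
 * Illustrative usage (a sketch with assumed registers, not a call site from
 * this file): an interrupt entry path that already has the current thread in
 * x1 could move onto the authed interrupt stack like so. The _THREAD variants
 * chase thread->cpuDataP themselves, so callers only supply the thread.
 *
 *	LOAD_INT_STACK_THREAD	dst=x2, src=x1, tmp=x3
 *	mov	sp, x2		// now on this CPU's interrupt stack
 */

/* vim: set ft=asm: */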