/*
 * Copyright (c) 2019 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <arm64/proc_reg.h>
#include <pexpert/arm64/board_config.h>
#include "assym.s"

#ifndef __ASSEMBLER__
#error "This header should only be used in .s files"
#endif

/**
 * Loads the following values from the thread_kernel_state pointer in x0:
 *
 *	x1:  $x0->ss_64.pc
 *	w2:  $x0->ss_64.cpsr
 *	x16: $x0->ss_64.x16
 *	x17: $x0->ss_64.x17
 *	lr:  $x0->ss_64.lr
 *
 * On CPUs with PAC support, this macro will auth the above values with ml_check_signed_state().
 *
 * tmp1 - scratch register 1
 * tmp2 - scratch register 2
 * tmp3 - scratch register 3
 * tmp4 - scratch register 4
 * tmp5 - scratch register 5
 * tmp6 - scratch register 6
 */
/* BEGIN IGNORE CODESTYLE */
.macro AUTH_THREAD_STATE_IN_X0	tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, el0_state_allowed=0
#if __has_feature(ptrauth_calls)
	// Switch to SP_EL1 while validating the signed state.
	// NOTE(review): presumably so the check runs on the per-CPU exception stack -- confirm.
	msr		SPSel, #1
#endif
	ldr		w2, [x0, SS64_CPSR]
	.if \el0_state_allowed==0
#if __has_feature(ptrauth_calls)
	// If testing for a canary CPSR value, ensure that we do not observe writes to other fields without it
	dmb		ld
#endif
	.endif
	ldr		x1, [x0, SS64_PC]
	ldp		x16, x17, [x0, SS64_X16]

#if defined(HAS_APPLE_PAC)
	// Save x3-x6 to preserve across call
	mov		\tmp3, x3
	mov		\tmp4, x4
	mov		\tmp5, x5
	mov		\tmp6, x6

	/*
	 * Arg0: The ARM context pointer (already in x0)
	 * Arg1: PC to check (loaded above)
	 * Arg2: CPSR to check (loaded above)
	 * Arg3: the LR to check
	 *
	 * Stash saved state PC and CPSR in other registers to avoid reloading potentially unauthed
	 * values from memory. (ml_check_signed_state will clobber x1, x2, x16, and x17.)
	 */
	mov		\tmp1, x1
	mov		\tmp2, x2
	ldr		x3, [x0, SS64_LR]
	mov		x4, x16
	mov		x5, x17
	bl		EXT(ml_check_signed_state)
	mov		x1, \tmp1
	mov		x2, \tmp2
	mov		x16, x4
	// BUG FIX: x17 is documented as an output of this macro and was stashed in x5
	// above, but was never restored before x5 is overwritten with \tmp5 below,
	// leaving x17 holding whatever ml_check_signed_state left in it.
	mov		x17, x5
	msr		SPSel, #0

	.if \el0_state_allowed==0
	and		\tmp2, \tmp2, #PSR64_MODE_MASK
	cbnz	\tmp2, 1f
	bl		EXT(ml_auth_thread_state_invalid_cpsr)
1:
	.endif

	// LR was already loaded/authed earlier, if we reload it we might be loading a potentially unauthed value
	mov		lr, x3
	mov		x3, \tmp3
	mov		x4, \tmp4
	mov		x5, \tmp5
	mov		x6, \tmp6
#else
	ldr		lr, [x0, SS64_LR]
#endif /* defined(HAS_APPLE_PAC) */
.endmacro

#if !__ARM_ARCH_8_6__
// brk immediate used when a manual auth check detects a failed autda
// (pre-ARMv8.6 cores lack FPAC and do not fault on auth failure).
.set BRK_AUTDA_FAILURE,	0xc472
#endif

/**
 * Loads and auths the top of a thread's kernel stack pointer.
 *
 * Faults on auth failure.
src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input):  struct thread *
 * dst (output): ptrauth_auth(src->machine.kstackptr)
 * tmp:          clobbered
 */
.macro LOAD_KERN_STACK_TOP	dst, src, tmp
	add		\tmp, \src, TH_KSTACKPTR	// \tmp = storage address of kstackptr (also the base of the auth modifier)
	ldr		\dst, [\tmp]			// load the (signed) kernel stack top
#if __has_feature(ptrauth_calls)
	// Blend a constant discriminator into bits 63:48 of the storage address,
	// then auth with the DA key using that blended value as the modifier.
	movk	\tmp, TH_KSTACKPTR_DIVERSIFIER, lsl #48
	autda	\dst, \tmp
#if !__ARM_ARCH_8_6__
	// Pre-ARMv8.6 cores (no FPAC) do not fault on a failed autda; they corrupt
	// the pointer instead.  Detect failure by comparing against the PAC-stripped
	// value and brk explicitly on mismatch.
	mov		\tmp, \dst
	xpacd	\tmp				// \tmp = \dst with PAC bits stripped
	cmp		\tmp, \dst
	b.eq	Lkstackptr_ok_\@
	brk	BRK_AUTDA_FAILURE
Lkstackptr_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro

/**
 * Loads and auths a thread's user context data.
 *
 * Faults on auth failure. src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input):  struct thread *
 * dst (output): ptrauth_auth(src->machine.upcb)
 * tmp:          clobbered
 */
.macro LOAD_USER_PCB	dst, src, tmp
	add		\tmp, \src, TH_UPCB		// \tmp = storage address of upcb (auth modifier base)
	ldr		\dst, [\tmp]			// load the (signed) user PCB pointer
#if __has_feature(ptrauth_calls)
	// Auth with DA key; modifier = storage address blended with a constant
	// discriminator in the upper 16 bits.
	movk	\tmp, TH_UPCB_DIVERSIFIER, lsl #48
	autda	\dst, \tmp
#if !__ARM_ARCH_8_6__
	// Manual auth-failure check for cores without FPAC (see LOAD_KERN_STACK_TOP).
	mov		\tmp, \dst
	xpacd	\tmp
	cmp		\tmp, \dst
	b.eq	Lupcb_ok_\@
	brk	BRK_AUTDA_FAILURE
Lupcb_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro

/**
 * Loads and auths a thread's interrupt stack pointer.
 *
 * Faults on auth failure. src and dst can be the same register, as long as
 * the caller doesn't mind clobbering the input.
 *
 * src (input):  struct thread *
 * dst (output): ptrauth_auth(src->cpuDataP.istackptr)
 * tmp:          clobbered
 */
.macro LOAD_INT_STACK	dst, src, tmp
	ldr		\tmp, [\src, ACT_CPUDATAP]	// \tmp = thread's per-CPU data pointer
	add		\tmp, \tmp, CPU_ISTACKPTR	// \tmp = storage address of istackptr (auth modifier base)
	ldr		\dst, [\tmp]			// load the (signed) interrupt stack pointer
#if __has_feature(ptrauth_calls)
	// Auth with DA key; modifier = storage address blended with a constant
	// discriminator in bits 63:48.
	movk	\tmp, CPU_ISTACKPTR_DIVERSIFIER, lsl #48
	autda	\dst, \tmp
#if !__ARM_ARCH_8_6__
	// Pre-ARMv8.6 cores (no FPAC) do not fault on a failed autda; compare
	// against the PAC-stripped value and brk explicitly on mismatch.
	mov		\tmp, \dst
	xpacd	\tmp
	cmp		\tmp, \dst
	b.eq	Listackptr_ok_\@
	brk	BRK_AUTDA_FAILURE
Listackptr_ok_\@:
#endif /* !__ARM_ARCH_8_6__ */
#endif /* __has_feature(ptrauth_calls) */
.endmacro
/* END IGNORE CODESTYLE */

/* vim: set ft=asm: */