1 /*
2 * Copyright (c) 2007 Apple Inc. All rights reserved.
3 *
4 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the License
10 * may not be used to create, or enable the creation or redistribution of,
11 * unlawful or unlicensed copies of an Apple operating system, or to
12 * circumvent, violate, or enable the circumvention or violation of, any
13 * terms of an Apple operating system software license agreement.
14 *
15 * Please obtain a copy of the License at
16 * http://www.opensource.apple.com/apsl/ and read it before using this file.
17 *
18 * The Original Code and all software distributed under the License are
19 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
20 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
21 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
22 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
23 * Please see the License for the specific language governing rights and
24 * limitations under the License.
25 *
26 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
27 */
28 /*
29 * @OSF_COPYRIGHT@
30 */
31 /*
32 * Mach Operating System
33 * Copyright (c) 1991,1990 Carnegie Mellon University
34 * All Rights Reserved.
35 *
36 * Permission to use, copy, modify and distribute this software and its
37 * documentation is hereby granted, provided that both the copyright
38 * notice and this permission notice appear in all copies of the
39 * software, derivative works or modified versions, and any portions
40 * thereof, and that both notices appear in supporting documentation.
41 *
42 * CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
43 * CONDITION. CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
44 * ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
45 *
46 * Carnegie Mellon requests users of this software to return to
47 *
48 * Software Distribution Coordinator or [email protected]
49 * School of Computer Science
50 * Carnegie Mellon University
51 * Pittsburgh PA 15213-3890
52 *
53 * any improvements or extensions that they make and grant Carnegie Mellon
54 * the rights to redistribute these changes.
55 */
56 /*
57 */
58
59 #ifndef _ARM_TRAP_H_
60 #define _ARM_TRAP_H_
61
/*
 * Hardware trap vectors for ARM.
 *
 * NOTE(review): vector 5 is intentionally unassigned (T_IRQ/T_FIQ keep
 * their architectural slots), but TRAP_NAMES below has no placeholder
 * for slot 5 — indexing the name list directly with T_IRQ (6) would
 * yield "fast interrupt".  Confirm how consumers index the list.
 */

#define T_RESET         0   /* reset */
#define T_UNDEF         1   /* undefined instruction */
#define T_SWI           2   /* software interrupt */
#define T_PREFETCH_ABT  3   /* prefetch abort */
#define T_DATA_ABT      4   /* data abort */
#define T_IRQ           6   /* interrupt request */
#define T_FIQ           7   /* fast interrupt request */
#define T_PMU           8   /* performance monitor */


/* Human-readable names for the trap vectors above. */
#define TRAP_NAMES "reset", "undefined instruction", "software interrupt", \
	           "prefetch abort", "data abort", "irq interrupt", \
	           "fast interrupt", "perfmon"
79
/*
 * Page-fault trap codes.  These are independent bit flags and may be
 * OR-ed together to describe a fault.
 */
#define T_PF_PROT   0x1     /* protection violation */
#define T_PF_WRITE  0x2     /* write access */
#define T_PF_USER   0x4     /* from user state */
86
87 #if !defined(ASSEMBLER)
/*
 * Emit a BRK breakpoint instruction carrying `code` as its immediate.
 * Unlike ml_fatal_trap below, this is not marked noreturn: execution is
 * expected to be able to continue past the trap.
 *
 * The "i" asm constraint requires a compile-time constant immediate, so
 * diagnose_if turns a non-constant argument into a readable compile
 * error instead of an obscure inline-asm failure.
 */
__attribute__((cold, always_inline))
static inline void
ml_recoverable_trap(unsigned int code)
__attribute__((diagnose_if(!__builtin_constant_p(code), "code must be constant", "error")))
{
	__asm__ volatile ("brk #%0" : : "i"(code));
}
95
/*
 * Emit a BRK breakpoint instruction carrying `code` as its immediate,
 * for traps the kernel never resumes from.  Marked noreturn, and
 * __builtin_unreachable() tells the compiler nothing follows the brk,
 * enabling better codegen at call sites.
 *
 * As above, diagnose_if enforces that `code` is a compile-time constant
 * (required by the "i" operand constraint).
 */
__attribute__((cold, noreturn, always_inline))
static inline void
ml_fatal_trap(unsigned int code)
__attribute__((diagnose_if(!__builtin_constant_p(code), "code must be constant", "error")))
{
	__asm__ volatile ("brk #%0" : : "i"(code));
	__builtin_unreachable();
}
104
105 #if defined(XNU_KERNEL_PRIVATE)
/*
 * Unfortunately the brk instruction only takes a constant, so we have to
 * unroll all the cases and let the compiler do the real work. ¯\_(ツ)_/¯
 *
 * Codegen should be clean due to inlining which enables constant-folding.
 */

/*
 * Expand one switch case that raises a fatal trap tagged 0x5500 + code.
 * The argument is parenthesized so compound expressions (e.g. the
 * `code + N` forms below) expand safely.  No `break` is needed:
 * ml_fatal_trap() is noreturn, so the case cannot fall through.
 */
#define TRAP_CASE(code) \
	case (code): \
	        ml_fatal_trap(0x5500 + (code));

/* Expand five consecutive fatal-trap cases starting at `code`. */
#define TRAP_5CASES(code) \
	TRAP_CASE(code) \
	TRAP_CASE((code) + 1) \
	TRAP_CASE((code) + 2) \
	TRAP_CASE((code) + 3) \
	TRAP_CASE((code) + 4)
122
/* For use by clang option -ftrap-function only */
/*
 * Dispatch a compiler-inserted soft trap by reason code:
 *   0-24  -> fatal BRK trap tagged 0x5500 + code (unrolled via TRAP_5CASES,
 *            since brk immediates must be compile-time constants)
 *   25    -> recoverable bounds-check telemetry trap (0xFF00 + 25)
 *   other -> fatal trap with tag 0
 */
__attribute__((cold, always_inline))
static inline void
ml_bound_chk_soft_trap(unsigned char code)
{
	switch (code) {
	/* 0 ~ 24 */
	TRAP_5CASES(0)
	TRAP_5CASES(5)
	TRAP_5CASES(10)
	TRAP_5CASES(15)
	TRAP_5CASES(20)
	case 25: /* Bound check */
		ml_recoverable_trap(0xFF00 + 25); /* code defined in kern/telemetry.h */
		break;
	default:
		ml_fatal_trap(0x0);
	}
}
142 #endif /* XNU_KERNEL_PRIVATE */
143 #endif /* !ASSEMBLER */
144
145 #if defined(MACH_KERNEL_PRIVATE)
146
147 #if !defined(ASSEMBLER) && defined(MACH_KERNEL)
148
149 #include <arm/thread.h>
150
/*
 * Opcodes planted by GDB as breakpoints: encodings from the ARM
 * undefined-instruction space (0xe7ffdefe / 0xe7ffdeff), plus their
 * low 16-bit halves for Thumb mode.
 */
#define GDB_TRAP_INSTR1 0xe7ffdefe
#define GDB_TRAP_INSTR2 0xe7ffdeff

#define ARM_GDB_INSTR1  GDB_TRAP_INSTR1
#define ARM_GDB_INSTR2  GDB_TRAP_INSTR2

/* True when `op` is one of the 32-bit ARM GDB breakpoint encodings. */
#define IS_ARM_GDB_TRAP(op) \
	(((op) == ARM_GDB_INSTR1) || ((op) == ARM_GDB_INSTR2))

/* Thumb variants: the low halfword of the ARM encodings. */
#define THUMB_GDB_INSTR1 (GDB_TRAP_INSTR1 & 0xFFFF)
#define THUMB_GDB_INSTR2 (GDB_TRAP_INSTR2 & 0xFFFF)

/* True when `op` is one of the 16-bit Thumb GDB breakpoint encodings. */
#define IS_THUMB_GDB_TRAP(op) \
	(((op) == THUMB_GDB_INSTR1) || ((op) == THUMB_GDB_INSTR2))
165
166
/*
 * ARM (A32) opcode masks/values used to classify a faulting instruction,
 * chiefly by arm_fault_type() below to decide whether an abort came from
 * a store.  Each *_MASK selects the fixed encoding bits compared against
 * the corresponding opcode value.
 */
#define ARM_STR             0x04000000  /* STR */
#define ARM_STRH            0x000000B0  /* STRH */
#define ARM_STRH_MASK       0x0E1000F0  /* STRH MASK */
#define ARM_SDX_MASK        0x0C100000  /* SINGLE DATA TRANSFER */
#define ARM_SNGL_DX_MASK    0x0C000000  /* SINGLE DATA TRANSFER MASK */
#define ARM_SDX             0x04000000

#define ARM_STM             0x08000000  /* STM */
#define ARM_BDX_MASK        0x0E100000  /* BLOCK DATA TRANSFER */
#define ARM_BLK_MASK        0x0E000000  /* BLOCK DATA TRANSFER */
#define ARM_BDX             0x08000000  /* BLOCK DATA TRANSFER */

/* Addressing-mode flag bits within a load/store encoding. */
#define ARM_WRITE_BACK      0x00200000
#define ARM_BASE_REG        0x000F0000
#define ARM_INCREMENT       0x00800000

#define ARM_STC             0x0C000000  /* STC */
#define ARM_CDX_MASK        ARM_BDX_MASK /* COPROCESSOR DATA TRANSFER */
#define ARM_CBLK_MASK       ARM_BLK_MASK
#define ARM_CDX             0x0C000000  /* COPROCESSOR DATA TRANSFER */

#define ARM_SWP             0x01000090  /* SWP */
#define ARM_SWP_MASK        0x0FB00FF0  /* SWP */

#define ARM_POST_INDEXING   0x01000000
#define ARM_IMMEDIATE       0x02000000
/* Shift-type field values for register-shifted operands. */
#define ARM_LSL             0
#define ARM_LSR             1
#define ARM_ASR             2
#define ARM_ROR             3
197
/*
 * Recognize MCR / MCRR writes to coprocessor 15 (the system control
 * coprocessor).  The predicates below use the named mask/code pairs
 * instead of repeating the literals, so mask and value cannot drift
 * apart; the numeric behavior is unchanged.
 */
#define MCR_MASK    0x0F100F10
#define MCR_CP15    0x0E000F10
#define MCRR_MASK   0x0FF00F00
#define MCRR_CP15   0x0C400F00

#define arm_mcr_cp15(op)    (((op)&MCR_MASK) == MCR_CP15)
#define arm_mcrr_cp15(op)   (((op)&MCRR_MASK) == MCRR_CP15)
205
/*
 * A 16-bit halfword starts a 32-bit Thumb-2 instruction when its top
 * five bits are 0b111xx with xx != 00 (i.e. 0xE800-0xFFFF).
 */
#define IS_THUMB32(op) ( \
	(((op) & 0xE000) == 0xE000) && (((op) & 0x1800) != 0x0000))

/*
 * Masks/opcodes for the 16-bit Thumb store-class encodings, used by
 * thumb_fault_type() below to decide whether an abort was a write.
 * The "(N)" suffixes refer to the encoding forms in the ARM ARM.
 */
#define THUMB_LDR_1_MASK    0x8800  /* (1) forms of LD* instructions */
#define THUMB_STR_1_MASK    0xF800  /* (1) forms of ST* instructions */
#define THUMB_STR_2_MASK    0xFE00  /* (2) forms of ST* instructions */
#define THUMB_STR_3_MASK    0xF800  /* (3) forms of ST* instructions */
#define THUMB_PUSH_MASK     0xFE00  /* PUSH instruction */

#define THUMB_LDRH_1        0x8800  /* LDRH(1) */
#define THUMB_STMIA         0xC000  /* STMIA */
#define THUMB_STR_1         0x6000  /* STR(1) */
#define THUMB_STR_2         0x5000  /* STR(2) */
#define THUMB_STR_3         0x9000  /* STR(3) */
#define THUMB_STRB_1        0x7000  /* STRB(1) */
#define THUMB_STRB_2        0x5400  /* STRB(2) */
#define THUMB_STRH_1        0x8000  /* STRH(1) */
#define THUMB_STRH_2        0x5200  /* STRH(2) */
#define THUMB_PUSH          0xB400  /* PUSH */
#define THUMB_LDMIA         0xC800  /* LDMIA */
#define THUMB_POP           0xBC00  /* POP */
227
228
/*
 * Shifts, masks, and other values for load/store multiple decoding; largely needed for
 * supporting misaligned accesses.
 */
/* Bit offsets of instruction fields (shift amounts). */
#define THUMB_STR_1_BASE_OFFSET 8   /* Offset of the base register field */
#define THUMB_PUSH_EXTRA_OFFSET 8   /* Offset of the "extra" register field */
#define ARM_STM_BASE_OFFSET     16  /* Offset of the base register field */
#define ARM_STM_LOAD_OFFSET     20  /* Offset of the load flag */
#define ARM_STM_WBACK_OFFSET    21  /* Offset of the writeback flag */
#define ARM_STM_INCR_OFFSET     23  /* Offset of the increment flag */
#define ARM_STM_BEFORE_OFFSET   24  /* Offset of the pre-index flag */
#define ARM_REG_LIST_LR_OFFSET  14  /* Offset of LR in the register list */
#define ARM_REG_LIST_PC_OFFSET  15  /* Offset of PC in the register list */

/* Masks corresponding to the fields above (pre-shift, in instruction bits). */
#define THUMB_STR_REG_LIST_MASK 0x000000FF  /* Offset of the reg list is 0 */
#define THUMB_STR_1_BASE_MASK   0x00000700
#define THUMB_PUSH_EXTRA_MASK   0x00000100
#define ARM_STM_REG_LIST_MASK   0x0000FFFF  /* Offset of the reg list is 0 */
#define ARM_STM_BASE_MASK       0x000F0000
#define ARM_STM_LOAD_MASK       0x00100000
#define ARM_STM_WBACK_MASK      0x00200000
#define ARM_STM_INCR_MASK       0x00800000
#define ARM_STM_BEFORE_MASK     0x01000000
#define ARM_COND_MASK           0xF0000000  /* Mask for the condition code */

#define ARM_COND_UNCOND         0xF0000000  /* Instruction does not support condition codes */
255
/*
 * Mask/code pairs identifying ARM-encoded (A32) Advanced SIMD and VFP
 * instructions.  An opcode belongs to the class when (op & MASKn) == CODEn
 * for any pair n.
 */
#define ARM_SIMD_MASK0      0xFE000000
#define ARM_SIMD_CODE0      0xF2000000

#define ARM_VFP_MASK0       0x0F000E10
#define ARM_VFP_CODE0       0x0E000A00

#define ARM_SIMD_VFP_MASK0  0x0E000E00
#define ARM_SIMD_VFP_CODE0  0x0C000A00
#define ARM_SIMD_VFP_MASK1  0xFF100000
#define ARM_SIMD_VFP_CODE1  0xF4000000
#define ARM_SIMD_VFP_MASK2  0x0F000E10
#define ARM_SIMD_VFP_CODE2  0x0E000A10
#define ARM_SIMD_VFP_MASK3  0x0FE00E00
#define ARM_SIMD_VFP_CODE3  0x0C400A00

/* True when `op` matches any of the ARM SIMD/VFP mask/code pairs above. */
#define IS_ARM_VFP(op) (                                        \
	(((op) & ARM_SIMD_MASK0)     == ARM_SIMD_CODE0)     ||  \
	(((op) & ARM_VFP_MASK0)      == ARM_VFP_CODE0)      ||  \
	(((op) & ARM_SIMD_VFP_MASK0) == ARM_SIMD_VFP_CODE0) ||  \
	(((op) & ARM_SIMD_VFP_MASK1) == ARM_SIMD_VFP_CODE1) ||  \
	(((op) & ARM_SIMD_VFP_MASK2) == ARM_SIMD_VFP_CODE2) ||  \
	(((op) & ARM_SIMD_VFP_MASK3) == ARM_SIMD_VFP_CODE3))
278
/*
 * Mask/code pairs identifying Thumb-encoded (T32) Advanced SIMD and VFP
 * instructions; the Thumb counterpart of the ARM table above.
 */
#define THUMB_SIMD_MASK0        0xEF000000
#define THUMB_SIMD_CODE0        0xEF000000

#define THUMB_VFP_MASK0         0xEF000E10
#define THUMB_VFP_CODE0         0xEE000A00

#define THUMB_SIMD_VFP_MASK0    0xEE000E00
#define THUMB_SIMD_VFP_CODE0    0xEC000A00
#define THUMB_SIMD_VFP_MASK1    0xFF100000
#define THUMB_SIMD_VFP_CODE1    0xF9000000
#define THUMB_SIMD_VFP_MASK2    0xEF000E10
#define THUMB_SIMD_VFP_CODE2    0xEE000A10
#define THUMB_SIMD_VFP_MASK3    0xEFE00E00
#define THUMB_SIMD_VFP_CODE3    0xEC400A00

/* True when `op` matches any of the Thumb SIMD/VFP mask/code pairs above. */
#define IS_THUMB_VFP(op) (                                              \
	(((op) & THUMB_SIMD_MASK0)     == THUMB_SIMD_CODE0)     ||      \
	(((op) & THUMB_VFP_MASK0)      == THUMB_VFP_CODE0)      ||      \
	(((op) & THUMB_SIMD_VFP_MASK0) == THUMB_SIMD_VFP_CODE0) ||      \
	(((op) & THUMB_SIMD_VFP_MASK1) == THUMB_SIMD_VFP_CODE1) ||      \
	(((op) & THUMB_SIMD_VFP_MASK2) == THUMB_SIMD_VFP_CODE2) ||      \
	(((op) & THUMB_SIMD_VFP_MASK3) == THUMB_SIMD_VFP_CODE3))
301
/*
 * Fault helpers implemented in the pmap layer — declarations only here.
 * NOTE(review): parameter semantics are not visible in this header;
 * consult the defining translation unit before relying on them.
 */
extern boolean_t arm_force_fast_fault(ppnum_t, vm_prot_t, int, void *);
extern kern_return_t arm_fast_fault(pmap_t, vm_map_address_t, vm_prot_t, bool, bool);
304
/*
 * Determines if the aborted instruction is read or write operation.
 * Evaluates to a vm_prot_t: store-class opcodes yield
 * VM_PROT_WRITE|VM_PROT_READ, everything else VM_PROT_READ.
 * NOTE(review): `spsr` and `vaddr` are accepted but unused by both
 * macros — presumably kept for call-site compatibility; confirm.
 */
#define arm_fault_type(op, spsr, vaddr)         \
	(((((op)&ARM_CDX_MASK) == ARM_STC) ||       \
	(((op)&ARM_STRH_MASK) == ARM_STRH) ||       \
	(((op)&ARM_BDX_MASK) == ARM_STM) ||         \
	(((op)&ARM_SDX_MASK) == ARM_STR)) ?         \
	(VM_PROT_WRITE|VM_PROT_READ) : (VM_PROT_READ))

/* Thumb counterpart of arm_fault_type(): classify 16-bit store forms. */
#define thumb_fault_type(op, spsr, vaddr)           \
	(((((op)&THUMB_STR_1_MASK) == THUMB_STMIA) ||   \
	(((op)&THUMB_STR_1_MASK) == THUMB_STR_1) ||     \
	(((op)&THUMB_STR_2_MASK) == THUMB_STR_2) ||     \
	(((op)&THUMB_STR_3_MASK) == THUMB_STR_3) ||     \
	(((op)&THUMB_STR_1_MASK) == THUMB_STRB_1) ||    \
	(((op)&THUMB_STR_2_MASK) == THUMB_STRB_2) ||    \
	(((op)&THUMB_STR_1_MASK) == THUMB_STRH_1) ||    \
	(((op)&THUMB_STR_2_MASK) == THUMB_STRH_2) ||    \
	(((op)&THUMB_PUSH_MASK) == THUMB_PUSH)) ?       \
	(VM_PROT_WRITE|VM_PROT_READ) : (VM_PROT_READ))
326
/*
 * Callback type used for performance/trap instrumentation hooks,
 * invoked with the trap number and the saved register state.
 * NOTE(review): the two trailing int parameters are unnamed here and
 * their meaning cannot be determined from this header — check callers.
 */
typedef kern_return_t (*perfCallback)(
	int trapno,
	struct arm_saved_state *ss,
	int,
	int);
332
333 #endif /* !ASSEMBLER && MACH_KERNEL */
334
335 #endif /* MACH_KERNEL_PRIVATE */
336
337 #endif /* _ARM_TRAP_H_ */
338