Searched refs:x0 (Results 1 – 25 of 44) sorted by relevance

/xnu-11215.1.10/osfmk/arm64/
start.s
51 msr VBAR_EL1, x0
57 mov x0, x1
72 msr TTBR1_EL1, x0
87 msr SCTLR_EL1, x0
135 adrp x0, EXT(LowExceptionVectorBase)@page
136 add x0, x0, EXT(LowExceptionVectorBase)@pageoff
137 msr VBAR_EL1, x0
147 and x0, x15, #0xFFFF // CPU number in Affinity0, cluster ID in Affinity1
149 and x0, x15, #0xFF // CPU number is in MPIDR Affinity Level 0
157 cmp x0, x2 // Compare cpu data phys cpu and MPIDR_EL1 phys cpu
[all …]
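
The two `and` masks on lines 147 and 149 pull the CPU identifier out of MPIDR_EL1, whose Affinity0 field occupies bits [7:0] and Affinity1 bits [15:8]. A minimal C sketch of the same extraction (function names are illustrative, not xnu's):

    #include <stdint.h>

    /* Illustrative only: split an MPIDR_EL1 value into CPU number and
     * cluster ID, mirroring the 0xFFFF / 0xFF masks in start.s. */
    static inline uint32_t mpidr_cpu_number(uint64_t mpidr)
    {
        return (uint32_t)(mpidr & 0xFF);        /* Affinity0: CPU within cluster */
    }

    static inline uint32_t mpidr_cluster_id(uint64_t mpidr)
    {
        return (uint32_t)((mpidr >> 8) & 0xFF); /* Affinity1: cluster ID */
    }
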
machine_routines_asm.s
98 mov x2, x0
99 MOV64 x0, VMAPPLE_PAC_SET_EL0_DIVERSIFIER_AT_EL1
102 cbnz x0, .
103 LOAD_CPU_JOP_KEY x0, x1
116 MOV64 x0, VMAPPLE_PAC_SET_EL0_DIVERSIFIER_AT_EL1
119 cbnz x0, .
177 orr x0, x4, x5
178 and x1, x1, x0 // Be paranoid, and clear bits we expect to
183 orr x0, x4, x5
184 and x2, x2, x0 // Be paranoid, and clear bits we expect to
[all …]
caches_asm.s
67 and x2, x0, x9
68 bic x0, x0, x9 // Cached aligned
73 ic ivau, x0 // Invalidate icache line
74 add x0, x0, #1<<MMU_I_CLINE // Get next cache aligned addr
168 cmp x1, x0
222 mrs x0, CLIDR_EL1
223 ubfx x0, x0, #24, #3 // extract CLIDR_EL1.LoC
242 mrs x0, CLIDR_EL1
243 ubfx x0, x0, #21, 3 // extract CLIDR_EL1.LoUIS
261 and x2, x0, x9
[all …]
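
The `ubfx` lines on 223 and 243 decode CLIDR_EL1: LoC (Level of Coherence) sits in bits [26:24] and LoUIS (Level of Unification, Inner Shareable) in bits [23:21]. The same field extraction as a C sketch:

    #include <stdint.h>

    /* Sketch of the CLIDR_EL1 field extraction done with ubfx above. */
    static inline unsigned clidr_loc(uint64_t clidr)
    {
        return (unsigned)((clidr >> 24) & 0x7); /* bits [26:24]: Level of Coherence */
    }

    static inline unsigned clidr_louis(uint64_t clidr)
    {
        return (unsigned)((clidr >> 21) & 0x7); /* bits [23:21]: LoUIS */
    }
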
locore.s
100 mov x0, #0
121 mrs x0, TPIDR_EL1
127 cbz x0, Lbegin_panic_lockdown_real_\@
128 ldr x1, [x0, TH_EXPECTED_FAULT_HANDLER]
133 ldr x1, [x0, TH_EXPECTED_FAULT_PC]
144 ldr x1, [x0, TH_EXPECTED_FAULT_ADDR]
159 mov x0, #0 // not a simulated lockdown
168 mov x0, #1 // this is a simulated lockdown!
263 mrs x0, SP_EL0 // Get SP_EL0
268 cmp x0, x2 // if (SP_EL0 >= kstack top)
[all …]
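
Lines 121–144 implement an expected-fault escape hatch: before entering panic lockdown, the handler checks whether the current thread (fetched from TPIDR_EL1) registered an expected fault handler whose PC or address matches the fault, and redirects there instead of panicking. A hedged C sketch of that control flow, with illustrative stand-ins for the TH_EXPECTED_FAULT_* fields:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Illustrative field names; the real offsets are the TH_EXPECTED_FAULT_*
     * constants used by the assembly. */
    struct thread_sketch {
        void    (*expected_fault_handler)(void);
        uintptr_t expected_fault_pc;
        uintptr_t expected_fault_addr;
    };

    static bool
    try_expected_fault(struct thread_sketch *th, uintptr_t pc, uintptr_t addr)
    {
        if (th == NULL || th->expected_fault_handler == NULL) {
            return false;               /* no handler: fall through to lockdown */
        }
        if (th->expected_fault_pc != pc && th->expected_fault_addr != addr) {
            return false;               /* not the fault we were expecting */
        }
        th->expected_fault_handler();   /* redirect instead of panicking */
        return true;
    }
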
exception_asm.h
173 stp x2, x3, [x0, SS64_X2]
175 stp x4, x5, [x0, SS64_X4]
176 stp x6, x7, [x0, SS64_X6]
177 stp x8, x9, [x0, SS64_X8]
178 stp x10, x11, [x0, SS64_X10]
179 stp x12, x13, [x0, SS64_X12]
180 stp x14, x15, [x0, SS64_X14]
181 stp x16, x17, [x0, SS64_X16]
182 stp x18, x19, [x0, SS64_X18]
183 stp x20, x21, [x0, SS64_X20]
[all …]
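
The SS64_* offsets index a 64-bit saved-state area into which the general-purpose registers are stored pairwise with `stp`. A rough, simplified C picture of the layout those offsets imply (the real definition is the arm_saved_state64 structure in xnu's thread-state headers):

    #include <stdint.h>

    /* Simplified sketch: SS64_Xn is effectively offsetof(..., x[n]). */
    struct saved_state64_sketch {
        uint64_t x[29];  /* x0..x28, filled two at a time by stp */
        uint64_t fp;     /* x29 */
        uint64_t lr;     /* x30 */
        uint64_t sp;
        uint64_t pc;
        uint32_t cpsr;   /* see SS64_CPSR in machine_routines_asm.h below */
    };
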
platform_tests_asm.s
39 ldr x0, [x0]
82 braaz x0
88 ldraa x0, [x0]
96 autiza x0
109 adr x0, 0b
110 br x0
156 mov x0, sp
160 str x0, [sp, #-16]!
168 mrs x0, SP_EL0
169 ldr x1, [x0], #16
[all …]
memcmp_zero.s
85 mov x2, x0 // copy the original addr
86 add x0, x0, #64
87 and x0, x0, #-64 // aligned addr
90 sub x2, x0, x2 // bytes between original and aligned addr
96 ldp q0, q1, [x0]
97 ldp q2, q3, [x0, #32]
102 add x0, x0, #64 // advance pointer
111 add x0, x0, x1
112 ldp q0, q1, [x0]
113 ldp q2, q3, [x0, #32]
[all …]
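
Lines 86–87 round the address up to the next 64-byte boundary; note that an already-aligned pointer still advances a full 64 bytes, since the unaligned head is handled separately. The same computation as a C sketch:

    #include <stdint.h>

    /* Add 64, then clear the low six bits: the next 64-byte boundary
     * strictly above addr. */
    static inline uintptr_t align_up_64(uintptr_t addr)
    {
        return (addr + 64) & ~(uintptr_t)63;
    }

So `align_up_64(p) - p` is the byte count between the original and aligned pointers, matching the `sub x2, x0, x2` on line 90.
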
strnlen.s
81 and x2, x0, #-16
95 and x4, x0, #0xf
117 sub x0, x2, x0
119 add x0, x0, x1
123 mov x0, #0
135 sub x0, x2, x0 // index of vector in string
138 add x0, x0, x1
149 and x1, x0, #-16
163 and x2, x0, #0xf
200 sub x0, x1, x0
[all …]
strncmp.s
78 0: tst x0, #(kVectorSize-1)
80 ldrb w4, [x0],#1 // load byte from src1
91 mov x0, x3
127 1: ldr q0, [x0],#(kVectorSize)
141 2: ldrb w4, [x0],#1 // load byte from src1
148 tst x0, #(kVectorSize-1)
165 add x4, x0, x3 // save the addresses of the last vectors
170 ldr q0, [x0],#(kVectorSize)
181 mov x0, x4
183 ldr q0, [x0],#(kVectorSize)
[all …]
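
The structure here is the usual align-then-vectorize loop: compare byte-by-byte until src1 reaches a kVectorSize boundary, then switch to 16-byte `ldr q` loads. A scalar C sketch of the shape (kVectorSize assumed to be 16, per the q-register loads):

    #include <stddef.h>
    #include <stdint.h>

    #define kVectorSize 16  /* assumed from the q-register loads above */

    /* Sketch: byte loop until src1 is aligned; the real code then compares
     * kVectorSize bytes per iteration with SIMD where this falls through
     * to a second scalar loop. */
    static int
    strncmp_shape(const unsigned char *s1, const unsigned char *s2, size_t n)
    {
        while (n && ((uintptr_t)s1 & (kVectorSize - 1))) { /* tst x0, #(kVectorSize-1) */
            if (*s1 != *s2 || *s1 == 0)
                return *s1 - *s2;
            s1++, s2++, n--;
        }
        /* ...vector loop over kVectorSize-byte chunks goes here... */
        while (n) {
            if (*s1 != *s2 || *s1 == 0)
                return *s1 - *s2;
            s1++, s2++, n--;
        }
        return 0;
    }
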
arm64_hypercall.c
54 hvc_5(uint64_t *x0, uint64_t *x1, uint64_t *x2, uint64_t *x3, uint64_t *x4) in hvc_5() argument
68 : [o0] "=m" (*x0), in hvc_5()
73 : [i0] "r" (*x0), in hvc_5()
80 return *(int64_t *)x0 >= 0; in hvc_5()
84 hvc_2(uint64_t *x0, uint64_t *x1) in hvc_2() argument
87 return hvc_5(x0, x1, &x, &x, &x); in hvc_2()
91 hvc_1(uint64_t *x0) in hvc_1() argument
94 return hvc_5(x0, &x, &x, &x, &x); in hvc_1()
193 uint64_t x0 = VMAPPLE_PAC_NOP; in hvg_is_hcall_available() local
194 (void) hvc_1(&x0); in hvg_is_hcall_available()
[all …]
bzero.s
57 mov x3, x0
69 stp x1, x1, [x0]
70 stp x1, x1, [x0, #16]
71 stp x1, x1, [x0, #32]
72 stp x1, x1, [x0, #48]
73 add x3, x0, #64
75 add x2, x2, x0 // end of buffer
107 mov x3, x0
119 stp x1, x1, [x0]
120 add x3, x0, #16
[all …]
cswitch.s
271 set_thread_registers x0, x1, x2
272 LOAD_KERN_STACK_TOP dst=x1, src=x0, tmp=x2 // Get top of kernel stack
274 set_process_dependent_keys_and_sync_context x0, x1, x2, x3, w4
275 mov x0, #0 // Clear argument to thread_continue
301 mov x20, x0 //continuation
306 mov x0, #1
310 mov x0, x21 // Set the first parameter
318 mrs x0, TPIDR_EL1 // Get the current thread pointer
334 LOAD_KERN_STACK_TOP dst=x3, src=x0, tmp=x4 // Get the old kernel stack top
371 mrs x0, TPIDR_EL1 // Get thread pointer
[all …]
pac_asm.h
111 MOV64 x0, VMAPPLE_PAC_SET_EL0_DIVERSIFIER
114 cbnz x0, .
132 MOV64 x0, VMAPPLE_PAC_SET_B_KEYS
135 cbnz x0, .
158 mov x0, #VMAPPLE_PAC_SET_INITIAL_STATE
160 cbnz x0, .
bcopy.s
73 mov x3, x0
74 mov x0, x1
95 sub x3, x0, x1
98 mov x3, x0 // copy destination pointer
116 sub x5, x3, x0 // bytes between original dst and aligned dst
140 stp x12,x13,[x0] // initial unaligned store
141 stp x14,x15,[x0, #16] // initial unaligned store
232 add x4, x0, x2
272 stp x12,x13,[x0, #16] // In the forward copy, we need to compute the
273 stp x14,x15,[x0] // address of these stores, but here we already
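
The `sub x3, x0, x1` on line 95 computes the distance between destination and source to pick a copy direction: if the unsigned difference is at least the length, a forward copy cannot clobber unread source bytes; otherwise the buffers overlap and the copy must run backwards, which is why the trailing stores on lines 272–273 are issued in reverse order. The classic C form of that check:

    #include <stddef.h>
    #include <stdint.h>

    /* memmove-style direction choice: unsigned wraparound makes one
     * comparison cover both "dst below src" and "no overlap" cases. */
    static void
    overlap_safe_copy(char *dst, const char *src, size_t len)
    {
        if ((uintptr_t)dst - (uintptr_t)src >= len) {
            for (size_t i = 0; i < len; i++)  /* forward copy is safe */
                dst[i] = src[i];
        } else {
            for (size_t i = len; i-- > 0; )   /* overlapping: copy backwards */
                dst[i] = src[i];
        }
    }
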
pinst.s
62 msr TTBR1_EL1, x0
66 msr VBAR_EL1, x0
70 msr TCR_EL1, x0
75 msr SCTLR_EL1, x0
iofilter_asm.s
46 at s1e1w, x0 // Get PA of the addr passed in for comparison.
51 bfxil x10, x0, #0, #14 // Copy the page offset from the VA to assemble the PA.
53 csel x0, x10, xzr, eq // If translation was successful return PA, else 0.
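
This sequence runs a stage-1 EL1 write translation with AT S1E1W, reads the result from PAR_EL1, and splices the VA's 14-bit page offset into the returned PA (a 14-bit offset implies 16 KB pages). A hedged inline-asm sketch of the same walk; the PAR_EL1 field handling is simplified:

    #include <stdint.h>

    /* Sketch, assuming 16 KB pages as the bfxil width above implies.
     * Returns 0 if the translation aborted, like the csel on line 53. */
    static uint64_t
    va_to_pa_sketch(uint64_t va)
    {
        uint64_t par;

        __asm__ volatile("at s1e1w, %0" : : "r"(va)); /* stage-1 EL1 write translation */
        __asm__ volatile("isb");
        __asm__ volatile("mrs %0, PAR_EL1" : "=r"(par));

        if (par & 1)                                  /* PAR_EL1.F: translation failed */
            return 0;

        uint64_t pa = par & 0x0000fffffffff000ULL;    /* PAR_EL1.PA field */
        return (pa & ~0x3fffULL) | (va & 0x3fffULL);  /* splice in the page offset */
    }
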
machine_routines_asm.h
60 ldr w2, [x0, SS64_CPSR]
67 ldr x1, [x0, SS64_PC]
68 ldp x16, x17, [x0, SS64_X16]
88 ldr x3, [x0, SS64_LR]
111 ldr lr, [x0, SS64_LR]
smccc_asm.h
43 stp x0, x1, [sp, #-16]!
55 ldp x0, x1, [sp], #16
/xnu-11215.1.10/san/memory/
kasan-test-arm64.s
23 stp x19, x20, [x0, JMP_r19_20]
24 stp x21, x22, [x0, JMP_r21_22]
25 stp x23, x24, [x0, JMP_r23_24]
26 stp x25, x26, [x0, JMP_r25_26]
27 stp x27, x28, [x0, JMP_r27_28]
28 stp x29, lr, [x0, JMP_r29_lr]
29 stp fp, x1, [x0, JMP_fp_sp]
30 stp d8, d9, [x0, JMP_d8_d9]
31 stp d10, d11, [x0, JMP_d10_d11]
32 stp d12, d13, [x0, JMP_d12_d13]
[all …]
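
The JMP_* offsets describe a setjmp-style buffer holding the AArch64 callee-saved state: x19–x28, fp, lr, sp, and d8 onward (the AAPCS64 callee-saved FP registers run through d15). A sketch of the layout those offsets imply, with illustrative names:

    #include <stdint.h>

    /* Illustrative layout only; the real offsets are the JMP_* constants. */
    struct kasan_jmp_buf_sketch {
        uint64_t x19_28[10]; /* JMP_r19_20 .. JMP_r27_28 */
        uint64_t x29, lr;    /* JMP_r29_lr: frame pointer and return address */
        uint64_t fp, sp;     /* JMP_fp_sp: frame and stack pointers at the call site */
        double   d8_15[8];   /* JMP_d8_d9 onward: callee-saved FP registers */
    };

Restoring this state and branching through the saved lr is what lets the KASAN tests long-jump out of an intentionally faulting probe.
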
/xnu-11215.1.10/osfmk/arm/commpage/
commpage_asm.s
150 stp x0, x1, [sp, #-16]!
154 mov x0, \lock_addr
159 ldp x0, x1, [sp], #16
170 stp x0, xzr, [sp, #-16]! // Save x0 since it'll be clobbered by return value
173 mov \result, x0
175 ldp x0, xzr, [sp], #16 // Restore saved registers
185 stp x0, xzr, [sp, #-16]! // Save x0 since it'll be clobbered by return value
188 mov \result, x0
190 ldp x0, xzr, [sp], #16 // Restore saved registers
279 mov x0, #-1 // Failed
[all …]
/xnu-11215.1.10/doc/debugging/
task_ref.md
71 0xffffff801ace9250 task_kernel 68 367663 367595 0x0
72 0xffffff801ace9288 task_internal 974 4953 3979 0x0
73 0xffffff801ace92c0 task_mig 0 3670 3670 0x0
74 0xffffff801ace9218 task_external 35 108 73 0x0
75 0xffffff9369dc7b20 task_com.apple.iokit.IOAcceleratorFamily2 29 77 48 0x0
76 0xffffff936a3f0a20 task_com.apple.iokit.CoreAnalyticsFamily 1 1 0 0x0
77 0xffffff936a22cb20 task_com.apple.iokit.EndpointSecurity 0 1 1 0x0
78 0xffffff936a283f60 task_com.apple.iokit.IOSurface 5 5 0 0x0
79 0xffffff936a3f08a0 task_com.apple.security.sandbox 0 24 24 0x0
88 0xffffff936a4b9200 task_local_kernel 1 6 5 0x0
[all …]
/xnu-11215.1.10/osfmk/arm64/sptm/
start_sptm.s
78 cmp x0, x8
90 mov x0, x1
110 cmp x0, x20
159 mov x0, x26
187 and x0, x15, #(MPIDR_AFF0_MASK | MPIDR_AFF1_MASK)
189 and x0, x15, #(MPIDR_AFF0_MASK)
213 cmp x0, x2
235 mov x0, x21
266 MOV64 x0, 0xDEADB001
274 MOV64 x0, 0xDEADB002
/xnu-11215.1.10/bsd/dev/arm64/
cpu_copy_in_cksum.s
65 #define src x0
321 and x0, sum, #0xffff
322 add x0, x0, sum, lsr #16
325 add x0, x0, partial, lsr #16
327 add x0, x0, partial
330 and t, x0, #0xffff
331 add x0, t, x0, lsr #16
337 and t, x0, #0xffff
338 add x0, t, x0, lsr #16
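
Lines 321–338 perform the standard ones-complement fold for the Internet checksum: the high half of the accumulator is repeatedly added back into the low 16 bits so end-around carries are not lost. The equivalent fold of a 64-bit partial sum in C:

    #include <stdint.h>

    /* Fold a wide ones-complement partial sum down to 16 bits,
     * reinjecting carries as the RFC 1071 checksum requires. */
    static uint16_t
    csum_fold64(uint64_t sum)
    {
        sum = (sum & 0xffffffffULL) + (sum >> 32); /* 64 -> 33 bits */
        while (sum >> 16)                          /* fold until carries vanish */
            sum = (sum & 0xffffULL) + (sum >> 16);
        return (uint16_t)sum;
    }
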
cpu_in_cksum.s
97 #define m x0
108 #define ptr_m x0
152 mov x0, x3
424 and x0, x4, x3, lsr #48
447 adrp x0, Lin_cksum_whoops_str@page
448 add x0, x0, Lin_cksum_whoops_str@pageoff
450 mov x0, #-1
/xnu-11215.1.10/libsyscall/wrappers/
__get_cpu_capabilities.s
69 ldr x0, Lcommpage_cc_addr
70 ldr x0, [x0]
