Home
last modified time | relevance | path

Searched refs:x3 (Results 1 – 25 of 37) sorted by relevance

12

/xnu-12377.81.4/osfmk/arm64/
H A Dbzero.s57 mov x3, x0
73 add x3, x0, #64
74 and x3, x3, #-64
76 add x4, x3, #64 // end of first cacheline to zero
79 0: dc zva, x3 // zero cacheline
80 add x3, x3, #64 // increment pointer
83 1: add x3, x3, x2 // back up pointer to (end of buffer) - 64.
84 stp x1, x1, [x3] // and store 64 bytes to reach end of buffer.
85 stp x1, x1, [x3, #16]
86 stp x1, x1, [x3, #32]
[all …]
H A Dbcopy.s74 mov x3, x0
76 mov x1, x3
96 sub x3, x0, x1
97 cmp x3, x2
99 mov x3, x0 // copy destination pointer
113 add x3, x3, #32
114 and x3, x3, #-32 // aligned dst
117 sub x5, x3, x0 // bytes between original dst and aligned dst
157 stnp x8, x9, [x3]
158 stnp x10,x11,[x3, #16]
[all …]
H A Dstrncmp.s74 eor x3, x3, x3
81 subs x3, x4, x5 // if they are not equal
90 mov x0, x3
155 subs x3, x4, x5 // if they are not equal
192 subs x3, x2, #(kVectorSize)
194 add x4, x0, x3 // save the addresses of the last vectors
195 add x5, x1, x3
196 mov x2, x3 // length -= kVectorSize
241 sub x3, x3, #(kVectorSize)
242 ldrb w4, [x0, x3]
[all …]
H A Dstrnlen.s87 adr x3, L_masks
88 ldr q2, [x3],#16
97 sub x3, x3, x4
98 ldr q1, [x3]
137 cmp x1, x3 // if NUL occurs before maxlen bytes
138 csel x1, x1, x3, cc // return strlen, else maxlen
155 adr x3, L_masks
156 ldr q2, [x3],#16
165 sub x3, x3, x2
166 ldr q1, [x3]
H A Dmachine_routines_asm.s545 ldp x3, x4, [\src]
546 stp x3, x4, [\dst]
549 ldp x3, x4, [\src, #32]
550 stp x3, x4, [\dst, #32]
555 ldp x3, x4, [\src, #64]
556 stp x3, x4, [\dst, #64]
559 ldp x3, x4, [\src, #96]
560 stp x3, x4, [\dst, #96]
565 ldp x3, x4, [\src, #128]
566 stp x3, x4, [\dst, #128]
[all …]
H A Dpinst.s89 check_instruction x2, x3, __pinst_set_ttbr1, 0xd65f03c0d5182020
95 check_instruction x2, x3, __pinst_set_vbar, 0xd65f03c0d518c000
101 check_instruction x2, x3, __pinst_set_tcr, 0xd65f03c0d5182040
107 check_instruction x2, x3, __pinst_set_sctlr, 0xd65f03c0d5181000
129 check_instruction x2, x3, __pinst_spsel_1, 0xd65f03c0d50041bf
H A Dlocore.s110 mov x3, #0
146 mov x3, #((1 << (64 - T1SZ_BOOT - 1)) - 1)
147 and x4, x1, x3
148 and x5, x2, x3
157 and x4, x1, x3
158 and x5, x2, x3
275 stp x2, x3, [sp, #-16]! // Save {x2-x3}
294 LOAD_KERN_STACK_TOP dst=x2, src=x1, tmp=x3 // Get top of kernel stack
295 sub x3, x2, KERNEL_STACK_SIZE // Find bottom of kernel stack
298 cmp x0, x3 // if (SP_EL0 > kstack bottom)
[all …]
H A Dmemcmp_zero.s131 mov x3, #0
133 orr x3, x3, x2 // use orr to keep non-zero bytes
137 tst x3, x3
H A Dcswitch.s293 set_process_dependent_keys_and_sync_context x0, x1, x2, x3, w4
324 cbz x3, 1f
353 LOAD_KERN_STACK_TOP dst=x3, src=x0, tmp=x4 // Get the old kernel stack top
354 save_general_registers x3, 4
356 set_thread_registers x2, x3, x4
357 LOAD_KERN_STACK_TOP dst=x3, src=x2, tmp=x4
358 load_general_registers x3, 4
359 set_process_dependent_keys_and_sync_context x2, x3, x4, x5, w6
410 set_process_dependent_keys_and_sync_context x0, x1, x2, x3, w4
H A Dmachine_routines_asm.h72 mov \tmp3, x3
90 ldr x3, [x0, SS64_LR]
108 mov lr, x3
109 mov x3, \tmp3
H A Dsmccc_asm.h44 stp x2, x3, [sp, #- 16]!
54 ldp x2, x3, [sp], #16
H A Dalternate_debugger_asm.s41 str x3, [sp, #0x8]
43 mov x1, x3
H A Dcaches_asm.s149 CACHE_AT_LEVEL x2, x1, x3
262 bic x3, x0, x9 // Cached aligned
268 dc cvau, x3 // Clean dcache line to PoU
269 add x3, x3, #(1<<MMU_CLINE) // Get next cache aligned addr
H A Darm64_hypercall.c54 hvc_5(uint64_t *x0, uint64_t *x1, uint64_t *x2, uint64_t *x3, uint64_t *x4) in hvc_5() argument
71 [o3] "=m" (*x3), in hvc_5()
76 [i3] "r" (*x3), in hvc_5()
H A Dstart.s152 add x3, x1, MAX_CPUS * 16 // end addr of data entries = start + (16 * MAX_CPUS)
161 cmp x1, x3
171 SET_PIO_ONLY_REGISTERS x21, x2, x3, x4, x5, x6
608 add x3, x1, PGBYTES
609 mov x2, x3
621 create_bootstrap_mapping x14, x15, x5, x3, x2, x9, x10, x11, x12, x13
H A Dlz4_encode_arm64.s48 #define src_begin x3
97 sub x14, x13, x3 // match_position = match_begin - src_begin
227 ccmp x15, x3, #0xd, gt // check if ref_begin reached src_begin
H A Dexception_asm.h173 stp x2, x3, [x0, SS64_X2]
239 mov x3, x20
/xnu-12377.81.4/osfmk/arm64/sptm/
H A Dstart_sptm.s180 mov x20, x3
211 mul x3, x19, x4
212 add x3, x1, x3
230 cmp x1, x3
255 adrp x3, EXT(arm_init_cpu)@page
256 add x3, x3, EXT(arm_init_cpu)@pageoff
257 cmp x2, x3
260 adrp x3, EXT(arm_init_idle_cpu)@page
261 add x3, x3, EXT(arm_init_idle_cpu)@pageoff
262 cmp x2, x3
/xnu-12377.81.4/bsd/dev/arm64/
H A Dcpu_in_cksum.s102 #define sum x3
157 mov x0, x3
408 add x3, x3, x7, lsr #32
410 add x3, x3, x7
412 add x3, x7, x3, lsr #32
429 and x0, x4, x3, lsr #48
430 and x1, x4, x3, lsr #32
431 and x2, x4, x3, lsr #16
432 and x3, x4, x3
/xnu-12377.81.4/osfmk/corecrypto/
H A Dccmode_gcm_gf_mult.c47 cc_dunit x1, x2, x3, x4, x5; in bmul64() local
61 x3 = x & m3; in bmul64()
68 z = (x1 * y1) ^ (x2 * y5) ^ (x3 * y4) ^ (x4 * y3) ^ (x5 * y2); in bmul64()
70 z = (x1 * y2) ^ (x2 * y1) ^ (x3 * y5) ^ (x4 * y4) ^ (x5 * y3); in bmul64()
72 z = (x1 * y3) ^ (x2 * y2) ^ (x3 * y1) ^ (x4 * y5) ^ (x5 * y4); in bmul64()
74 z = (x1 * y4) ^ (x2 * y3) ^ (x3 * y2) ^ (x4 * y1) ^ (x5 * y5); in bmul64()
76 z = (x1 * y5) ^ (x2 * y4) ^ (x3 * y3) ^ (x4 * y2) ^ (x5 * y1); in bmul64()
123 uint32_t x0, x1, x2, x3; in bmul32() local
135 x3 = x & m8; in bmul32()
141 z0 = ((uint64_t)x0 * y0) ^ ((uint64_t)x1 * y3) ^ ((uint64_t)x2 * y2) ^ ((uint64_t)x3 * y1); in bmul32()
[all …]
/xnu-12377.81.4/libsyscall/wrappers/
H A Dvarargs_wrappers.s42 ldp x2, x3, [fp, #16]
82 ldp x3, x4, [fp, #32]
99 ldp x3, x4, [fp, #32]
116 ldr x3, [fp, #16]
133 ldr x3, [fp, #32]
H A Dmach_absolute_time.s229 ldr x1, [x3] // Load the offset
231 ldr x2, [x3] // Load the offset
252 movk x3, #(((_COMM_PAGE_TIMEBASE_OFFSET) >> 48) & 0x000000000000FFFF), lsl #48
253 movk x3, #(((_COMM_PAGE_TIMEBASE_OFFSET) >> 32) & 0x000000000000FFFF), lsl #32
254 movk x3, #(((_COMM_PAGE_TIMEBASE_OFFSET) >> 16) & 0x000000000000FFFF), lsl #16
255 movk x3, #((_COMM_PAGE_TIMEBASE_OFFSET) & 0x000000000000FFFF)
256 ldrb w2, [x3, #((_COMM_PAGE_USER_TIMEBASE) - (_COMM_PAGE_TIMEBASE_OFFSET))]
/xnu-12377.81.4/osfmk/arm/commpage/
H A Dcommpage_asm.s276 casa w10, w11, [x3] // Atomic CAS with acquire barrier
297 stlr wzr, [x3]
377 add x3, x0, #16 // address of lock = x3 = x0 + 16
388 ldxr w9, [x3] // arm the monitor for the lock address
399 BACKOFF x3
/xnu-12377.81.4/libsyscall/custom/
H A D__syscall.s66 ldp x3, x4, [sp, #16]
H A Dcustom.s140 mov x3, #2

12