Home
last modified time | relevance | path

Searched refs:__asm__ (Results 1 – 25 of 79) sorted by relevance

1234

/xnu-8020.121.3/tests/
H A D avx.c 163 __asm__ volatile ("vmovaps %%ymm0, %0" :"=m" (vec256array[i])); in store_ymm()
164 i++; __asm__ volatile ("vmovaps %%ymm1, %0" :"=m" (vec256array[i])); in store_ymm()
165 i++; __asm__ volatile ("vmovaps %%ymm2, %0" :"=m" (vec256array[i])); in store_ymm()
166 i++; __asm__ volatile ("vmovaps %%ymm3, %0" :"=m" (vec256array[i])); in store_ymm()
167 i++; __asm__ volatile ("vmovaps %%ymm4, %0" :"=m" (vec256array[i])); in store_ymm()
168 i++; __asm__ volatile ("vmovaps %%ymm5, %0" :"=m" (vec256array[i])); in store_ymm()
169 i++; __asm__ volatile ("vmovaps %%ymm6, %0" :"=m" (vec256array[i])); in store_ymm()
170 i++; __asm__ volatile ("vmovaps %%ymm7, %0" :"=m" (vec256array[i])); in store_ymm()
172 i++; __asm__ volatile ("vmovaps %%ymm8, %0" :"=m" (vec256array[i])); in store_ymm()
173 i++; __asm__ volatile ("vmovaps %%ymm9, %0" :"=m" (vec256array[i])); in store_ymm()
[all …]
H A D restart.c 17 __asm__(" .align 4\n"
32 __asm__(" .align 4\n"
44 __asm__(" .align 4\n"
65 __asm__(" .align 4\n"
H A D hvtest_x86_guest.c 7 #define VMCALL(x) __asm__("vmcall" : : "a" ((x)) :)
39 __asm__("rdmsr" : "=d"(outhi), "=a"(outlo) : "c"(idx)); in rdmsr()
51 __asm__("wrmsr" : : "d"(inhi),"a"(inlo),"c"(idx)); in wrmsr()
/xnu-8020.121.3/osfmk/i386/
H A D proc_reg.h 217 __asm__ volatile ("mov %%es, %0" : "=r" (es)); in get_es()
224 __asm__ volatile ("mov %0, %%es" : : "r" (es)); in set_es()
231 __asm__ volatile ("mov %%ds, %0" : "=r" (ds)); in get_ds()
238 __asm__ volatile ("mov %0, %%ds" : : "r" (ds)); in set_ds()
245 __asm__ volatile ("mov %%fs, %0" : "=r" (fs)); in get_fs()
252 __asm__ volatile ("mov %0, %%fs" : : "r" (fs)); in set_fs()
259 __asm__ volatile ("mov %%gs, %0" : "=r" (gs)); in get_gs()
266 __asm__ volatile ("mov %0, %%gs" : : "r" (gs)); in set_gs()
273 __asm__ volatile ("mov %%ss, %0" : "=r" (ss)); in get_ss()
280 __asm__ volatile ("mov %0, %%ss" : : "r" (ss)); in set_ss()
[all …]
H A D bit_routines.h 75 __asm__ volatile(" jmp 1f \n \
86 __asm__ volatile(" lock \n \
98 __asm__ volatile(" lock \n \
104 __asm__ volatile(" lock \n \
112 __asm__ volatile (" lock \n \ in atomic_incl()
120 __asm__ volatile (" lock \n \ in atomic_decl()
129 __asm__ volatile ( in atomic_decl_and_test()
H A D pal_native.h 40 #define pal_hlt() __asm__ volatile ("sti; hlt")
41 #define pal_sti() __asm__ volatile ("sti")
42 #define pal_cli() __asm__ volatile ("cli")
48 __asm__ volatile ( "cli"); in pal_stop_cpu()
50 __asm__ volatile ( "wbinvd; hlt"); in pal_stop_cpu()
64 #define pal_pmc_swi() __asm__ __volatile__("int %0"::"i"(LAPIC_PMC_SWI_VECTOR):"memory")
H A D trap.c 503 __asm__ volatile ("mov %0,%%dr7" : : "r" (dr7)); in reset_dr7()
954 __asm__ volatile ("mov %%db6, %0" : "=r" (dr6)); in user_trap()
955 __asm__ volatile ("mov %0, %%db6" : : "r" (clear)); in user_trap()
1493 __asm__ volatile ("movq %%rbx, %0" : "=m" (iks->k_rbx)); in sync_iss_to_iks()
1494 __asm__ volatile ("movq %%rsp, %0" : "=m" (iks->k_rsp)); in sync_iss_to_iks()
1495 __asm__ volatile ("movq %%rbp, %0" : "=m" (iks->k_rbp)); in sync_iss_to_iks()
1496 __asm__ volatile ("movq %%r12, %0" : "=m" (iks->k_r12)); in sync_iss_to_iks()
1497 __asm__ volatile ("movq %%r13, %0" : "=m" (iks->k_r13)); in sync_iss_to_iks()
1498 __asm__ volatile ("movq %%r14, %0" : "=m" (iks->k_r14)); in sync_iss_to_iks()
1499 __asm__ volatile ("movq %%r15, %0" : "=m" (iks->k_r15)); in sync_iss_to_iks()
[all …]
H A D fpu.c 107 __asm__ volatile("fninit")
110 __asm__("fnstcw %0" : "=m" (*(unsigned short *)(control)))
113 __asm__ volatile("fldcw %0" : : "m" (*(unsigned short *) &(control)) )
116 __asm__ volatile("fnclex")
119 __asm__ volatile("fnsave %0" : "=m" (*state))
122 __asm__ volatile("frstor %0" : : "m" (state))
125 __asm__("fwait");
130 __asm__ __volatile__ ("fxrstor %0" :: "m" (*a)); in fxrstor()
136 __asm__ __volatile__ ("fxsave %0" : "=m" (*a)); in fxsave()
142 __asm__ __volatile__ ("fxrstor64 %0" :: "m" (*a)); in fxrstor64()
[all …]
/xnu-8020.121.3/osfmk/arm/
H A D kpc_arm.c 68 __asm__ volatile ("mrc p15, 0, %0, c9, c12, 1;" : "=r" (PMCNTENSET)); in enable_counter()
73 __asm__ volatile ("mcr p15, 0, %0, c9, c14, 1;" : : "r" (mask)); in enable_counter()
76 __asm__ volatile ("mcr p15, 0, %0, c9, c12, 1;" : : "r" (mask)); in enable_counter()
83 __asm__ volatile ("mcr p15, 0, %0, c9, c12, 0;" : : "r" (PMCR)); in enable_counter()
99 __asm__ volatile ("mrc p15, 0, %0, c9, c12, 2;" : "=r" (PMCNTENCLR)); in disable_counter()
104 __asm__ volatile ("mcr p15, 0, %0, c9, c12, 2;" : : "r" (mask)); in disable_counter()
107 __asm__ volatile ("mcr p15, 0, %0, c9, c14, 2;" : : "r" (mask)); in disable_counter()
114 __asm__ volatile ("mcr p15, 0, %0, c9, c12, 0;" : : "r" (PMCR)); in disable_counter()
129 __asm__ volatile ("mrc p15, 0, %0, c9, c13, 0;" : "=r" (low)); in read_counter()
136 __asm__ volatile ("mcr p15, 0, %0, c9, c12, 5;" : : "r" (counter - 1)); in read_counter()
[all …]
H A D machine_cpuid.c 44 __asm__ volatile ("mrs %0, MIDR_EL1" : "=r" (midr)); in machine_read_midr()
56 __asm__ volatile ("mrs %0, CLIDR_EL1" : "=r" (clidr)); in machine_read_clidr()
68 __asm__ volatile ("mrs %0, CCSIDR_EL1" : "=r" (ccsidr)); in machine_read_ccsidr()
91 __asm__ volatile ("msr CSSELR_EL1, %0" : : "r" (csselr)); in machine_write_csselr()
120 __asm__ volatile ("mrs %0, ID_AA64DFR0_EL1" : "=r"(id_dfr0.value)); in machine_do_debugid()
142 __asm__ volatile ("vmrs %0, mvfr0" :"=r"(arm_mvfr0_info.value)); in machine_do_mvfpid()
143 __asm__ volatile ("vmrs %0, mvfr1" :"=r"(arm_mvfr1_info.value)); in machine_do_mvfpid()
H A D cpu_data.h 63 __asm__ ("mrs %0, TPIDR_EL1" : "=r" (result)); in current_thread_fast()
98 __asm__ volatile ("mrs %0, TPIDR_EL1" : "=r" (result)); in current_thread_volatile()
112 __asm__ volatile ( in exception_stack_pointer()
H A D machdep_call.c 55 __asm__ volatile ("mrs %0, TPIDRRO_EL0" : "=r" (uthread)); in get_tpidrro()
66 __asm__ volatile ("msr TPIDRRO_EL0, %0" : : "r" (uthread)); in set_tpidrro()
H A D locks.h 245 __asm__ volatile ("cpsid if" ::: "memory"); // Mask IRQ FIQ in disable_interrupts_noread()
257 __asm__ volatile ("mrs %[state], cpsr" :[state] "=r" (state)); // Read cpsr in get_interrupts()
278 __asm__ volatile ("msr cpsr, %[state]" :: [state] "r" (state) : "cc", "memory"); // Restore CPSR in restore_interrupts()
/xnu-8020.121.3/EXTERNAL_HEADERS/architecture/i386/
H A D pio.h 68 __asm__ volatile("inl %w1, %0" : "=a" (datum) : "Nd" (port)); in inl()
76 __asm__ volatile("inw %w1, %w0" : "=a" (datum) : "Nd" (port)); in inw()
84 __asm__ volatile("inb %w1, %b0" : "=a" (datum) : "Nd" (port)); in inb()
92 __asm__ volatile("outl %0, %w1" : : "a" (datum), "Nd" (port)); in outl()
99 __asm__ volatile("outw %w0, %w1" : : "a" (datum), "Nd" (port)); in outw()
106 __asm__ volatile("outb %b0, %w1" : : "a" (datum), "Nd" (port)); in outb()
/xnu-8020.121.3/osfmk/corecrypto/
H A D ccsha2_internal.h 54 …el_avx2_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
55 …el_avx1_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
56 …l_ssse3_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
57 …el_avx2_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha512_vng_in…
58 …el_avx1_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha512_vng_in…
59 …l_ssse3_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha512_vng_in…
85 …el_sse3_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
/xnu-8020.121.3/libsyscall/os/
H A D tsd.h 72 __asm__ __volatile__ ("mrc p15, 0, %[p], c13, c0, 3" : [p] "=&r" (p)); in _os_cpu_number()
76 __asm__ __volatile__ ("mrs %0, TPIDR_EL0" : "=r" (p)); in _os_cpu_number()
80 __asm__ __volatile__ ("sidt %[p]" : [p] "=&m" (p)); in _os_cpu_number()
103 __asm__("mov %%gs:%1, %0" : "=r" (ret) : "m" (*(void **)(slot * sizeof(void *)))); in _os_tsd_get_direct()
112 __asm__("movl %1, %%gs:%0" : "=m" (*(void **)(slot * sizeof(void *))) : "rn" (val)); in _os_tsd_set_direct()
114 __asm__("movl %1, %%gs:%0" : "=m" (*(void **)(slot * sizeof(void *))) : "ri" (val)); in _os_tsd_set_direct()
116 __asm__("movq %1, %%gs:%0" : "=m" (*(void **)(slot * sizeof(void *))) : "rn" (val)); in _os_tsd_set_direct()
130 __asm__("mrc p15, 0, %0, c13, c0, 3\n" in _os_tsd_get_base()
140 __asm__ ("mrs %0, TPIDRRO_EL0" : "=r" (tsd)); in _os_tsd_get_base()
/xnu-8020.121.3/libkern/os/
H A D log.h 44 #define OS_LOG_NOTAILCALL_MARKER __asm__("")
296 __asm__(""); /* avoid tailcall */ \
331 __asm__(""); /* avoid tailcall */ \
366 __asm__(""); /* avoid tailcall */ \
400 __asm__(""); /* avoid tailcall */ \
437 __asm__(""); /* avoid tailcall */ \
465 __asm__(""); /* avoid tailcall */ \
496 __asm__(""); /* avoid tailcall */ \
575 __asm__(""); /* avoid tailcall */ …
H A D trace.h 69 __asm__(""); /* avoid tailcall */ \
86 __asm__(""); /* avoid tailcall */ \
107 __asm__(""); /* avoid tailcall */ \
131 __asm__(""); /* avoid tailcall */ \
158 __asm__(""); /* avoid tailcall */ \
188 __asm__(""); /* avoid tailcall */ \
221 __asm__(""); /* avoid tailcall */ \
257 __asm__(""); /* avoid tailcall */ \
264 __asm__(""); /* avoid tailcall */ \
281 __asm__(""); /* avoid tailcall */ \
[all …]
H A D atomic_private_arch.h 59 __asm__ __volatile__("and %[_dep], %[_v], #0" \
138 __asm__ __volatile__("and %w[_dep], %w[_v], wzr" \
147 __asm__ __volatile__("and %[_dep], %[_v], xzr" \
/xnu-8020.121.3/libkern/libkern/i386/
H A D _OSByteOrder.h 62 __asm__ ("bswap %0" : "+r" (_data)); in _OSSwapInt32()
84 __asm__ ("bswap %%eax\n\t" in _OSSwapInt64()
97 __asm__ ("bswap %0" : "+r" (_data)); in _OSSwapInt64()
/xnu-8020.121.3/EXTERNAL_HEADERS/corecrypto/
H A D cc_priv.h 150 __asm__ __volatile__ ( \
157 __asm__ __volatile__ ( \
187 __asm__ __volatile__ ( \
194 __asm__ __volatile__ ( \
246 __asm__ ("roll %%cl,%0" in CC_ROL()
254 __asm__ ("rorl %%cl,%0" in CC_ROR()
263 __asm__ __volatile__ ("roll %2,%0" \
272 __asm__ __volatile__ ("rorl %2,%0" \
304 __asm__("rolq %%cl,%0" in CC_ROL64()
312 __asm__("rorq %%cl,%0" in CC_ROR64()
[all …]
/xnu-8020.121.3/bsd/net/
H A D flowhash.c 260 __asm__ ( "rol $31, %[k1]\n\t" :[k1] "+r" (k1) : :); in net_flowhash_mh3_x64_128()
262 __asm__ ( "ror %[k1], %[k1], #(64-31)\n\t" :[k1] "+r" (k1) : :); in net_flowhash_mh3_x64_128()
270 __asm__ ( "rol $27, %[h1]\n\t" :[h1] "+r" (h1) : :); in net_flowhash_mh3_x64_128()
272 __asm__ ( "ror %[h1], %[h1], #(64-27)\n\t" :[h1] "+r" (h1) : :); in net_flowhash_mh3_x64_128()
281 __asm__ ( "rol $33, %[k2]\n\t" :[k2] "+r" (k2) : :); in net_flowhash_mh3_x64_128()
283 __asm__ ( "ror %[k2], %[k2], #(64-33)\n\t" :[k2] "+r" (k2) : :); in net_flowhash_mh3_x64_128()
291 __asm__ ( "rol $31, %[h2]\n\t" :[h2] "+r" (h2) : :); in net_flowhash_mh3_x64_128()
293 __asm__ ( "ror %[h2], %[h2], #(64-31)\n\t" :[h2] "+r" (h2) : :); in net_flowhash_mh3_x64_128()
329 __asm__ ( "rol $33, %[k2]\n\t" :[k2] "+r" (k2) : :); in net_flowhash_mh3_x64_128()
331 __asm__ ( "ror %[k2], %[k2], #(64-33)\n\t" :[k2] "+r" (k2) : :); in net_flowhash_mh3_x64_128()
[all …]
/xnu-8020.121.3/libsa/
H A D lastkernelconstructor.c 39 __asm__(".globl _last_kernel_symbol");
40 __asm__(".zerofill __LAST, __last, _last_kernel_symbol, 0");
/xnu-8020.121.3/bsd/skywalk/core/
H A D skywalk_common.h 50 __asm__(""); __builtin_trap(); \
55 __asm__(""); __builtin_trap(); \
160 __asm__ __volatile__ ( in __sk_vcopy64_16()
226 __asm__ __volatile__ ( in __sk_vcopy64_20()
265 __asm__ __volatile__ ( in __sk_vcopy64_24()
303 __asm__ __volatile__ ( in __sk_vcopy64_32()
375 __asm__ __volatile__ ( in __sk_vcopy64_40()
409 __asm__ __volatile__ ( in __sk_zero_16()
430 __asm__ __volatile__ ( in __sk_zero_32()
453 __asm__ __volatile__ ( in __sk_zero_48()
[all …]
/xnu-8020.121.3/libkern/gen/
H A D OSDebug.cpp 169 __asm__ volatile ("movq %%rbp, %0" : "=m" (stackptr));
221 __asm__ volatile ("mov %0,r7" : "=r" (fp));
224 __asm__ volatile ("mov %0, fp" : "=r" (fp));

1234