| /xnu-8019.80.24/tests/ |
| avx.c |
|   163  __asm__ volatile ("vmovaps %%ymm0, %0" :"=m" (vec256array[i]));  in store_ymm()
|   164  i++; __asm__ volatile ("vmovaps %%ymm1, %0" :"=m" (vec256array[i]));  in store_ymm()
|   165  i++; __asm__ volatile ("vmovaps %%ymm2, %0" :"=m" (vec256array[i]));  in store_ymm()
|   166  i++; __asm__ volatile ("vmovaps %%ymm3, %0" :"=m" (vec256array[i]));  in store_ymm()
|   167  i++; __asm__ volatile ("vmovaps %%ymm4, %0" :"=m" (vec256array[i]));  in store_ymm()
|   168  i++; __asm__ volatile ("vmovaps %%ymm5, %0" :"=m" (vec256array[i]));  in store_ymm()
|   169  i++; __asm__ volatile ("vmovaps %%ymm6, %0" :"=m" (vec256array[i]));  in store_ymm()
|   170  i++; __asm__ volatile ("vmovaps %%ymm7, %0" :"=m" (vec256array[i]));  in store_ymm()
|   172  i++; __asm__ volatile ("vmovaps %%ymm8, %0" :"=m" (vec256array[i]));  in store_ymm()
|   173  i++; __asm__ volatile ("vmovaps %%ymm9, %0" :"=m" (vec256array[i]));  in store_ymm()
|   [all …]
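The test spills the YMM registers to memory one at a time because register names cannot be indexed in a loop. A minimal sketch of the same pattern, assuming a 32-byte-aligned buffer type; vec256_t and store_first_ymm are illustrative names, not xnu's:

    #include <stdint.h>

    /* vmovaps requires 32-byte alignment, hence the aligned attribute. */
    typedef struct { uint8_t bytes[32]; } __attribute__((aligned(32))) vec256_t;

    static void store_first_ymm(vec256_t *buf)   /* illustrative; needs AVX */
    {
        __asm__ volatile ("vmovaps %%ymm0, %0" : "=m" (buf[0]));
        __asm__ volatile ("vmovaps %%ymm1, %0" : "=m" (buf[1]));
        __asm__ volatile ("vmovaps %%ymm2, %0" : "=m" (buf[2]));
        __asm__ volatile ("vmovaps %%ymm3, %0" : "=m" (buf[3]));
    }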
|
| restart.c |
|   17  __asm__(" .align 4\n"
|   32  __asm__(" .align 4\n"
|   44  __asm__(" .align 4\n"
|   65  __asm__(" .align 4\n"
|
| hvtest_x86_guest.c |
|   7   #define VMCALL(x) __asm__("vmcall" : : "a" ((x)) :)
|   39  __asm__("rdmsr" : "=d"(outhi), "=a"(outlo) : "c"(idx));  in rdmsr()
|   51  __asm__("wrmsr" : : "d"(inhi),"a"(inlo),"c"(idx));  in wrmsr()
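A hedged sketch of the rdmsr/wrmsr wrappers this pattern implies: ECX selects the MSR and the 64-bit value travels in EDX:EAX. The wrapper names below are assumptions, not the test's exact signatures:

    #include <stdint.h>

    static inline uint64_t rdmsr64(uint32_t idx)      /* illustrative name */
    {
        uint32_t outhi, outlo;
        __asm__("rdmsr" : "=d" (outhi), "=a" (outlo) : "c" (idx));
        return ((uint64_t)outhi << 32) | outlo;
    }

    static inline void wrmsr64(uint32_t idx, uint64_t val)   /* illustrative name */
    {
        __asm__("wrmsr" : : "d" ((uint32_t)(val >> 32)), "a" ((uint32_t)val), "c" (idx));
    }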
|
| /xnu-8019.80.24/osfmk/i386/ |
| proc_reg.h |
|   217  __asm__ volatile ("mov %%es, %0" : "=r" (es));  in get_es()
|   224  __asm__ volatile ("mov %0, %%es" : : "r" (es));  in set_es()
|   231  __asm__ volatile ("mov %%ds, %0" : "=r" (ds));  in get_ds()
|   238  __asm__ volatile ("mov %0, %%ds" : : "r" (ds));  in set_ds()
|   245  __asm__ volatile ("mov %%fs, %0" : "=r" (fs));  in get_fs()
|   252  __asm__ volatile ("mov %0, %%fs" : : "r" (fs));  in set_fs()
|   259  __asm__ volatile ("mov %%gs, %0" : "=r" (gs));  in get_gs()
|   266  __asm__ volatile ("mov %0, %%gs" : : "r" (gs));  in set_gs()
|   273  __asm__ volatile ("mov %%ss, %0" : "=r" (ss));  in get_ss()
|   280  __asm__ volatile ("mov %0, %%ss" : : "r" (ss));  in set_ss()
|   [all …]
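Every accessor is the same two-instruction pattern: move a segment register through a general-purpose register operand. A minimal sketch for %ds, assuming kernel (ring 0) context and a 16-bit selector type:

    static inline unsigned short get_ds(void)
    {
        unsigned short ds;
        __asm__ volatile ("mov %%ds, %0" : "=r" (ds));
        return ds;
    }

    static inline void set_ds(unsigned short ds)   /* loads the selector; faults on a bad value */
    {
        __asm__ volatile ("mov %0, %%ds" : : "r" (ds));
    }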
|
| bit_routines.h |
|   75   __asm__ volatile(" jmp 1f \n \
|   86   __asm__ volatile(" lock \n \
|   98   __asm__ volatile(" lock \n \
|   104  __asm__ volatile(" lock \n \
|   112  __asm__ volatile (" lock \n \  in atomic_incl()
|   120  __asm__ volatile (" lock \n \  in atomic_decl()
|   129  __asm__ volatile (  in atomic_decl_and_test()
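The lock-prefixed sequences make the read-modify-write atomic across CPUs. A sketch of an atomic add in that style; the exact xnu signature is truncated in the match, so this one is an assumption:

    static inline void atomic_addl(volatile int *p, int delta)   /* illustrative */
    {
        __asm__ volatile ("lock; addl %1, %0"
                          : "+m" (*p)          /* read-modify-write destination */
                          : "ir" (delta)
                          : "cc");
    }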
|
| pal_native.h |
|   40  #define pal_hlt() __asm__ volatile ("sti; hlt")
|   41  #define pal_sti() __asm__ volatile ("sti")
|   42  #define pal_cli() __asm__ volatile ("cli")
|   48  __asm__ volatile ( "cli");  in pal_stop_cpu()
|   50  __asm__ volatile ( "wbinvd; hlt");  in pal_stop_cpu()
|   64  #define pal_pmc_swi() __asm__ __volatile__("int %0"::"i"(LAPIC_PMC_SWI_VECTOR):"memory")
|
| trap.c |
|   503   __asm__ volatile ("mov %0,%%dr7" : : "r" (dr7));  in reset_dr7()
|   962   __asm__ volatile ("mov %%db6, %0" : "=r" (dr6));  in user_trap()
|   963   __asm__ volatile ("mov %0, %%db6" : : "r" (clear));  in user_trap()
|   1501  __asm__ volatile ("movq %%rbx, %0" : "=m" (iks->k_rbx));  in sync_iss_to_iks()
|   1502  __asm__ volatile ("movq %%rsp, %0" : "=m" (iks->k_rsp));  in sync_iss_to_iks()
|   1503  __asm__ volatile ("movq %%rbp, %0" : "=m" (iks->k_rbp));  in sync_iss_to_iks()
|   1504  __asm__ volatile ("movq %%r12, %0" : "=m" (iks->k_r12));  in sync_iss_to_iks()
|   1505  __asm__ volatile ("movq %%r13, %0" : "=m" (iks->k_r13));  in sync_iss_to_iks()
|   1506  __asm__ volatile ("movq %%r14, %0" : "=m" (iks->k_r14));  in sync_iss_to_iks()
|   1507  __asm__ volatile ("movq %%r15, %0" : "=m" (iks->k_r15));  in sync_iss_to_iks()
|   [all …]
|
| fpu.c |
|   107  __asm__ volatile("fninit")
|   110  __asm__("fnstcw %0" : "=m" (*(unsigned short *)(control)))
|   113  __asm__ volatile("fldcw %0" : : "m" (*(unsigned short *) &(control)) )
|   116  __asm__ volatile("fnclex")
|   119  __asm__ volatile("fnsave %0" : "=m" (*state))
|   122  __asm__ volatile("frstor %0" : : "m" (state))
|   125  __asm__("fwait");
|   130  __asm__ __volatile__ ("fxrstor %0" :: "m" (*a));  in fxrstor()
|   136  __asm__ __volatile__ ("fxsave %0" : "=m" (*a));  in fxsave()
|   142  __asm__ __volatile__ ("fxrstor64 %0" :: "m" (*a));  in fxrstor64()
|   [all …]
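fxsave/fxrstor operate on a 512-byte, 16-byte-aligned save area. A sketch with an assumed area type; xnu's real struct differs:

    struct fx_area { unsigned char fx_bytes[512]; } __attribute__((aligned(16)));

    static inline void fxsave(struct fx_area *a)
    {
        __asm__ __volatile__ ("fxsave %0" : "=m" (*a));
    }

    static inline void fxrstor(const struct fx_area *a)
    {
        __asm__ __volatile__ ("fxrstor %0" : : "m" (*a));
    }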
|
| /xnu-8019.80.24/osfmk/arm/ |
| kpc_arm.c |
|   68   __asm__ volatile ("mrc p15, 0, %0, c9, c12, 1;" : "=r" (PMCNTENSET));  in enable_counter()
|   73   __asm__ volatile ("mcr p15, 0, %0, c9, c14, 1;" : : "r" (mask));  in enable_counter()
|   76   __asm__ volatile ("mcr p15, 0, %0, c9, c12, 1;" : : "r" (mask));  in enable_counter()
|   83   __asm__ volatile ("mcr p15, 0, %0, c9, c12, 0;" : : "r" (PMCR));  in enable_counter()
|   99   __asm__ volatile ("mrc p15, 0, %0, c9, c12, 2;" : "=r" (PMCNTENCLR));  in disable_counter()
|   104  __asm__ volatile ("mcr p15, 0, %0, c9, c12, 2;" : : "r" (mask));  in disable_counter()
|   107  __asm__ volatile ("mcr p15, 0, %0, c9, c14, 2;" : : "r" (mask));  in disable_counter()
|   114  __asm__ volatile ("mcr p15, 0, %0, c9, c12, 0;" : : "r" (PMCR));  in disable_counter()
|   129  __asm__ volatile ("mrc p15, 0, %0, c9, c13, 0;" : "=r" (low));  in read_counter()
|   136  __asm__ volatile ("mcr p15, 0, %0, c9, c12, 5;" : : "r" (counter - 1));  in read_counter()
|   [all …]
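On ARMv7 the PMU is reached through coprocessor 15: mrc reads a register into a GPR, mcr writes one, and c9/c12 selects the counter-control group. A sketch of the PMCNTENSET access pattern, assuming privileged code; the helper names are illustrative:

    #include <stdint.h>

    static inline uint32_t pmu_read_cntenset(void)
    {
        uint32_t v;
        __asm__ volatile ("mrc p15, 0, %0, c9, c12, 1" : "=r" (v));   /* PMCNTENSET */
        return v;
    }

    static inline void pmu_write_cntenset(uint32_t mask)
    {
        __asm__ volatile ("mcr p15, 0, %0, c9, c12, 1" : : "r" (mask));
    }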
|
| machine_cpuid.c |
|   44   __asm__ volatile ("mrs %0, MIDR_EL1" : "=r" (midr));  in machine_read_midr()
|   56   __asm__ volatile ("mrs %0, CLIDR_EL1" : "=r" (clidr));  in machine_read_clidr()
|   68   __asm__ volatile ("mrs %0, CCSIDR_EL1" : "=r" (ccsidr));  in machine_read_ccsidr()
|   91   __asm__ volatile ("msr CSSELR_EL1, %0" : : "r" (csselr));  in machine_write_csselr()
|   120  __asm__ volatile ("mrs %0, ID_AA64DFR0_EL1" : "=r"(id_dfr0.value));  in machine_do_debugid()
|   142  __asm__ volatile ("vmrs %0, mvfr0" :"=r"(arm_mvfr0_info.value));  in machine_do_mvfpid()
|   143  __asm__ volatile ("vmrs %0, mvfr1" :"=r"(arm_mvfr1_info.value));  in machine_do_mvfpid()
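The AArch64 ID registers are each read with a single mrs into a 64-bit GPR. A sketch in that style, assuming EL1 context; the wrapper name is illustrative:

    #include <stdint.h>

    static inline uint64_t read_midr_el1(void)
    {
        uint64_t midr;
        __asm__ volatile ("mrs %0, MIDR_EL1" : "=r" (midr));   /* main ID register */
        return midr;
    }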
|
| cpu_data.h |
|   63   __asm__ ("mrs %0, TPIDR_EL1" : "=r" (result));  in current_thread_fast()
|   98   __asm__ volatile ("mrs %0, TPIDR_EL1" : "=r" (result));  in current_thread_volatile()
|   112  __asm__ volatile (  in exception_stack_pointer()
|
| machdep_call.c |
|   55  __asm__ volatile ("mrs %0, TPIDRRO_EL0" : "=r" (uthread));  in get_tpidrro()
|   66  __asm__ volatile ("msr TPIDRRO_EL0, %0" : : "r" (uthread));  in set_tpidrro()
|
| locks.h |
|   245  __asm__ volatile ("cpsid if" ::: "memory"); // Mask IRQ FIQ  in disable_interrupts_noread()
|   257  __asm__ volatile ("mrs %[state], cpsr" :[state] "=r" (state)); // Read cpsr  in get_interrupts()
|   278  __asm__ volatile ("msr cpsr, %[state]" :: [state] "r" (state) : "cc", "memory"); // Restore CPSR  in restore_interrupts()
|
| /xnu-8019.80.24/EXTERNAL_HEADERS/architecture/i386/ |
| pio.h |
|   68   __asm__ volatile("inl %w1, %0" : "=a" (datum) : "Nd" (port));  in inl()
|   76   __asm__ volatile("inw %w1, %w0" : "=a" (datum) : "Nd" (port));  in inw()
|   84   __asm__ volatile("inb %w1, %b0" : "=a" (datum) : "Nd" (port));  in inb()
|   92   __asm__ volatile("outl %0, %w1" : : "a" (datum), "Nd" (port));  in outl()
|   99   __asm__ volatile("outw %w0, %w1" : : "a" (datum), "Nd" (port));  in outw()
|   106  __asm__ volatile("outb %b0, %w1" : : "a" (datum), "Nd" (port));  in outb()
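The port I/O helpers all use the same constraints: the data travels in AL/AX/EAX ("a") and the port is either an 8-bit immediate or DX ("Nd"). A sketch of the byte-sized pair, assuming ring 0 or a suitable IOPL:

    #include <stdint.h>

    static inline uint8_t inb(uint16_t port)
    {
        uint8_t datum;
        __asm__ volatile ("inb %w1, %b0" : "=a" (datum) : "Nd" (port));
        return datum;
    }

    static inline void outb(uint16_t port, uint8_t datum)
    {
        __asm__ volatile ("outb %b0, %w1" : : "a" (datum), "Nd" (port));
    }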
|
| /xnu-8019.80.24/osfmk/corecrypto/ |
| ccsha2_internal.h |
|   54  …el_avx2_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
|   55  …el_avx1_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
|   56  …l_ssse3_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
|   57  …el_avx2_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha512_vng_in…
|   58  …el_avx1_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha512_vng_in…
|   59  …l_ssse3_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha512_vng_in…
|   85  …el_sse3_compress(ccdigest_state_t state, size_t nblocks, const void *in) __asm__("_ccsha256_vng_in…
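Here __asm__ is not inline assembly at all: a trailing asm label binds a C declaration to an exact assembler symbol, so the C identifier can differ from the symbol implemented in a .s file. A sketch of the idiom with hypothetical names (the real xnu symbols are truncated above):

    #include <stddef.h>

    /* Hypothetical: declares my_sha256_compress() in C but resolves it
     * against the assembler symbol _my_sha256_compress_asm. */
    void my_sha256_compress(void *state, size_t nblocks, const void *in)
        __asm__("_my_sha256_compress_asm");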
|
| /xnu-8019.80.24/libsyscall/os/ |
| tsd.h |
|   72   __asm__ __volatile__ ("mrc p15, 0, %[p], c13, c0, 3" : [p] "=&r" (p));  in _os_cpu_number()
|   76   __asm__ __volatile__ ("mrs %0, TPIDR_EL0" : "=r" (p));  in _os_cpu_number()
|   80   __asm__ __volatile__ ("sidt %[p]" : [p] "=&m" (p));  in _os_cpu_number()
|   103  __asm__("mov %%gs:%1, %0" : "=r" (ret) : "m" (*(void **)(slot * sizeof(void *))));  in _os_tsd_get_direct()
|   112  __asm__("movl %1, %%gs:%0" : "=m" (*(void **)(slot * sizeof(void *))) : "rn" (val));  in _os_tsd_set_direct()
|   114  __asm__("movl %1, %%gs:%0" : "=m" (*(void **)(slot * sizeof(void *))) : "ri" (val));  in _os_tsd_set_direct()
|   116  __asm__("movq %1, %%gs:%0" : "=m" (*(void **)(slot * sizeof(void *))) : "rn" (val));  in _os_tsd_set_direct()
|   130  __asm__("mrc p15, 0, %0, c13, c0, 3\n"  in _os_tsd_get_base()
|   140  __asm__ ("mrs %0, TPIDRRO_EL0" : "=r" (tsd));  in _os_tsd_get_base()
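On x86_64 the thread's TSD base sits behind the GS segment, so a slot is read with a gs-relative memory operand; the ARM variants read TPIDR/TPIDRRO instead. A sketch of the x86_64 load, mirroring the match at line 103 (the wrapper name is an assumption):

    #include <stddef.h>

    static inline void *tsd_get_direct(unsigned long slot)   /* illustrative name */
    {
        void *ret;
        __asm__("mov %%gs:%1, %0"
                : "=r" (ret)
                : "m" (*(void **)(slot * sizeof(void *))));   /* slot offset, gs-relative */
        return ret;
    }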
|
| /xnu-8019.80.24/libkern/os/ |
| log.h |
|   44   #define OS_LOG_NOTAILCALL_MARKER __asm__("")
|   295  __asm__(""); /* avoid tailcall */ \
|   330  __asm__(""); /* avoid tailcall */ \
|   365  __asm__(""); /* avoid tailcall */ \
|   399  __asm__(""); /* avoid tailcall */ \
|   436  __asm__(""); /* avoid tailcall */ \
|   464  __asm__(""); /* avoid tailcall */ \
|   495  __asm__(""); /* avoid tailcall */ \
|   574  …__asm__(""); /* avoid tailcall */ …
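The empty __asm__("") after a call acts as a barrier against tail-call optimization: the wrapper keeps its own stack frame, so backtraces still show the logging call site. A minimal sketch of the idiom with hypothetical names:

    extern void my_log_impl(const char *msg);   /* hypothetical callee */

    void my_log_wrapper(const char *msg)
    {
        my_log_impl(msg);
        __asm__("");   /* avoid tailcall: keep my_log_wrapper in the backtrace */
    }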
|
| trace.h |
|   69   __asm__(""); /* avoid tailcall */ \
|   86   __asm__(""); /* avoid tailcall */ \
|   107  __asm__(""); /* avoid tailcall */ \
|   131  __asm__(""); /* avoid tailcall */ \
|   158  __asm__(""); /* avoid tailcall */ \
|   188  __asm__(""); /* avoid tailcall */ \
|   221  __asm__(""); /* avoid tailcall */ \
|   257  __asm__(""); /* avoid tailcall */ \
|   264  __asm__(""); /* avoid tailcall */ \
|   281  __asm__(""); /* avoid tailcall */ \
|   [all …]
|
| atomic_private_arch.h |
|   59   __asm__ __volatile__("and %[_dep], %[_v], #0" \
|   138  __asm__ __volatile__("and %w[_dep], %w[_v], wzr" \
|   147  __asm__ __volatile__("and %[_dep], %[_v], xzr" \
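ANDing a value with the zero register always yields zero, but the result is still data-dependent on the input; folding it into a later address is how these macros carry ordering through an address dependency. A sketch of the 64-bit AArch64 form (the function name is an assumption):

    #include <stdint.h>

    static inline uint64_t make_dependency(uint64_t v)   /* illustrative name */
    {
        uint64_t dep;
        __asm__ __volatile__("and %[_dep], %[_v], xzr"   /* result is 0 but depends on v */
                             : [_dep] "=r" (dep)
                             : [_v] "r" (v));
        return dep;
    }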
|
| /xnu-8019.80.24/EXTERNAL_HEADERS/corecrypto/ |
| cc_priv.h |
|   150  __asm__ __volatile__ ( \
|   157  __asm__ __volatile__ ( \
|   187  __asm__ __volatile__ ( \
|   194  __asm__ __volatile__ ( \
|   246  __asm__ ("roll %%cl,%0"  in CC_ROL()
|   254  __asm__ ("rorl %%cl,%0"  in CC_ROR()
|   263  __asm__ __volatile__ ("roll %2,%0" \
|   272  __asm__ __volatile__ ("rorl %2,%0" \
|   304  __asm__("rolq %%cl,%0"  in CC_ROL64()
|   312  __asm__("rorq %%cl,%0"  in CC_ROR64()
|   [all …]
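CC_ROL/CC_ROR map directly onto the x86 rotate instructions, with the variable count passed in CL through the "c" constraint. A sketch of the 32-bit rotate-left in that style (the function name is an assumption):

    #include <stdint.h>

    static inline uint32_t rol32(uint32_t word, int n)   /* illustrative name */
    {
        __asm__ ("roll %%cl, %0"
                 : "=r" (word)
                 : "0" (word), "c" (n));   /* "0": reuse the output register; count in CL */
        return word;
    }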
|
| /xnu-8019.80.24/libkern/libkern/i386/ |
| _OSByteOrder.h |
|   62  __asm__ ("bswap %0" : "+r" (_data));  in _OSSwapInt32()
|   84  __asm__ ("bswap %%eax\n\t"  in _OSSwapInt64()
|   97  __asm__ ("bswap %0" : "+r" (_data));  in _OSSwapInt64()
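The 32-bit swap is a single bswap on a read-write register operand. A minimal sketch (the function name is an assumption):

    #include <stdint.h>

    static inline uint32_t swap32(uint32_t data)   /* illustrative name */
    {
        __asm__ ("bswap %0" : "+r" (data));   /* reverse byte order in place */
        return data;
    }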
|
| /xnu-8019.80.24/bsd/net/ |
| flowhash.c |
|   260  __asm__ ( "rol $31, %[k1]\n\t" :[k1] "+r" (k1) : :);  in net_flowhash_mh3_x64_128()
|   262  __asm__ ( "ror %[k1], %[k1], #(64-31)\n\t" :[k1] "+r" (k1) : :);  in net_flowhash_mh3_x64_128()
|   270  __asm__ ( "rol $27, %[h1]\n\t" :[h1] "+r" (h1) : :);  in net_flowhash_mh3_x64_128()
|   272  __asm__ ( "ror %[h1], %[h1], #(64-27)\n\t" :[h1] "+r" (h1) : :);  in net_flowhash_mh3_x64_128()
|   281  __asm__ ( "rol $33, %[k2]\n\t" :[k2] "+r" (k2) : :);  in net_flowhash_mh3_x64_128()
|   283  __asm__ ( "ror %[k2], %[k2], #(64-33)\n\t" :[k2] "+r" (k2) : :);  in net_flowhash_mh3_x64_128()
|   291  __asm__ ( "rol $31, %[h2]\n\t" :[h2] "+r" (h2) : :);  in net_flowhash_mh3_x64_128()
|   293  __asm__ ( "ror %[h2], %[h2], #(64-31)\n\t" :[h2] "+r" (h2) : :);  in net_flowhash_mh3_x64_128()
|   329  __asm__ ( "rol $33, %[k2]\n\t" :[k2] "+r" (k2) : :);  in net_flowhash_mh3_x64_128()
|   331  __asm__ ( "ror %[k2], %[k2], #(64-33)\n\t" :[k2] "+r" (k2) : :);  in net_flowhash_mh3_x64_128()
|   [all …]
|
| /xnu-8019.80.24/libsa/ |
| lastkernelconstructor.c |
|   39  __asm__(".globl _last_kernel_symbol");
|   40  __asm__(".zerofill __LAST, __last, _last_kernel_symbol, 0");
|
| /xnu-8019.80.24/bsd/skywalk/core/ |
| skywalk_common.h |
|   50   __asm__(""); __builtin_trap(); \
|   55   __asm__(""); __builtin_trap(); \
|   160  __asm__ __volatile__ (  in __sk_vcopy64_16()
|   226  __asm__ __volatile__ (  in __sk_vcopy64_20()
|   265  __asm__ __volatile__ (  in __sk_vcopy64_24()
|   303  __asm__ __volatile__ (  in __sk_vcopy64_32()
|   375  __asm__ __volatile__ (  in __sk_vcopy64_40()
|   409  __asm__ __volatile__ (  in __sk_zero_16()
|   430  __asm__ __volatile__ (  in __sk_zero_32()
|   453  __asm__ __volatile__ (  in __sk_zero_48()
|   [all …]
|
| /xnu-8019.80.24/libsyscall/wrappers/ |
| mach_continuous_time.c |
|   40  __asm__ volatile ("dsb sy" ::: "memory");  in _mach_continuous_time_base()
|   42  __asm__ volatile ("lfence" ::: "memory");  in _mach_continuous_time_base()
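The ordering barrier is architecture-specific: "dsb sy" on arm64, "lfence" on x86_64. A sketch of a wrapper in that style; the function name is an assumption:

    static inline void ordering_barrier(void)   /* illustrative name */
    {
    #if defined(__arm64__)
        __asm__ volatile ("dsb sy" ::: "memory");
    #elif defined(__x86_64__)
        __asm__ volatile ("lfence" ::: "memory");
    #endif
    }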
|