Home
last modified time | relevance | path

Searched refs: MMU_CLINE (Results 1 – 4 of 4) sorted by relevance

/xnu-11417.101.15/osfmk/arm64/
H A Dcaches_asm.s260 mov x9, #((1<<MMU_CLINE)-1)
265 lsr x4, x4, #MMU_CLINE // Set cache line counter
269 add x3, x3, #(1<<MMU_CLINE) // Get next cache aligned addr
277 #define CLINE_FLUSH_STRIDE MMU_CLINE
423 mov x9, #((1<<MMU_CLINE)-1)
428 lsr x1, x1, #MMU_CLINE // Set cache line counter
432 add x0, x0, #(1<<MMU_CLINE) // Get next cache aligned addr
H A Dproc_reg.h156 #define MMU_CLINE 6 /* cache line size as 1<<MMU_CLINE (64) */ macro
164 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
172 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
180 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
188 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
196 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
204 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
212 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
220 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
228 #define MMU_CLINE 6 /* cache line size is 1<<MMU_CLINE (64) */ macro
[all …]
H A Dloose_ends.c213 #pragma unroll (4096 / (4 << MMU_CLINE)) in bzero_phys_page()
214 for (vm_offset_t offset = 0; offset < PAGE_SIZE; offset += (4 << MMU_CLINE)) { in bzero_phys_page()
221 : "r"(buf + offset + (0 << MMU_CLINE)) in bzero_phys_page()
222 , "r"(buf + offset + (1 << MMU_CLINE)) in bzero_phys_page()
223 , "r"(buf + offset + (2 << MMU_CLINE)) in bzero_phys_page()
224 , "r"(buf + offset + (3 << MMU_CLINE)) in bzero_phys_page()
/xnu-11417.101.15/doc/building/
H A Dxnu_build_consolidation.md127 The L1 cache line size is still hardcoded, and defined as `MMU_CLINE`. Since this value is always t…