| /xnu-8019.80.24/osfmk/i386/ |
| H A D | cpu.c | 95 cpu_data_t *cdp = current_cpu_datap(); in cpu_sleep() 106 cpu_data_t *cdp = current_cpu_datap(); in cpu_init() 187 cpu_data_t *cdp = current_cpu_datap(); in cpu_machine_init() 203 return current_cpu_datap()->cpu_processor; in current_processor() 216 return &current_cpu_datap()->cpu_pending_ast; in ast_pending() 243 return current_cpu_datap()->cpu_type; in cpu_type() 249 return current_cpu_datap()->cpu_subtype; in cpu_subtype() 255 return current_cpu_datap()->cpu_threadtype; in cpu_threadtype()
|
| H A D | mp_desc.h | 95 #define current_gdt() (current_cpu_datap()->cpu_desc_index.cdi_gdtb.ptr) 96 #define current_idt() (current_cpu_datap()->cpu_desc_index.cdi_idtb.ptr) 97 #define current_ldt() (current_cpu_datap()->cpu_desc_index.cdi_ldtb) 98 #define current_ktss() (current_cpu_datap()->cpu_desc_index.cdi_ktssb) 99 #define current_sstk() (current_cpu_datap()->cpu_desc_index.cdi_sstkb)
|
| H A D | i386_timer.c | 80 pp = current_cpu_datap(); in timer_intr() 183 pp = current_cpu_datap(); in timer_set_deadline() 209 pp = current_cpu_datap(); in timer_resync_deadlines() 269 pp = current_cpu_datap(); in timer_queue_expire_local() 291 pp = current_cpu_datap(); in timer_queue_expire_rescan() 331 cpu_data_t *cdp = current_cpu_datap(); in timer_queue_assign() 353 if (queue == &current_cpu_datap()->rtclock_timer.queue) { in timer_queue_cancel() 372 cpu_data_t *cdp = current_cpu_datap(); in timer_queue_migrate_cpu()
|
| H A D | acpi.c | 139 if (current_cpu_datap()->cpu_hibernate) { in acpi_hibernate() 190 cpu_data_t *cdp = current_cpu_datap(); in acpi_sleep_kernel() 287 if (current_cpu_datap()->cpu_hibernate) { in acpi_sleep_kernel() 296 cpu_syscall_init(current_cpu_datap()); in acpi_sleep_kernel() 369 pmCPUMarkRunning(current_cpu_datap()); in acpi_sleep_kernel() 408 if (current_cpu_datap()->cpu_hibernate) { in ml_hibernate_active_post() 412 current_cpu_datap()->cpu_hibernate = 0; in ml_hibernate_active_post() 458 if (current_cpu_datap()->cpu_hibernate) { in acpi_idle_kernel()
|
| H A D | mp.c | 530 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_SAFE); in cpu_signal_handler() 534 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_NORMAL); in cpu_signal_handler() 574 panic_i386_backtrace(stackptr, 64, &pstr[0], TRUE, current_cpu_datap()->cpu_int_state); in NMI_pte_corruption_callback() 588 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_SAFE); in NMIInterruptHandler() 617 cpu_number(), now, current_cpu_datap()->cpu_tlb_invalid); in NMIInterruptHandler() 633 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_SAFE); in NMIInterruptHandler() 635 current_cpu_datap()->cpu_NMI_acknowledged = TRUE; in NMIInterruptHandler() 636 i_bit_clear(MP_KDP, &current_cpu_datap()->cpu_signals); in NMIInterruptHandler() 663 pmSafeMode(&current_cpu_datap()->lcpu, PM_SAFE_FL_NORMAL); in NMIInterruptHandler() 885 current_cpu_datap()->cpu_rendezvous_in_progress = TRUE; in mp_rendezvous_action() [all …]
|
| H A D | pmCPU.c | 107 cpu_data_t *my_cpu = current_cpu_datap(); in machine_idle() 241 cpu_data_t *cpup = current_cpu_datap(); in pmCPUHalt() 324 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyLogicalCPU() 338 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyCore() 352 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyDie() 366 cpu_data_t *cpup = current_cpu_datap(); in pmGetMyPackage() 497 cpu_data_t *cpup = current_cpu_datap(); in pmCPUMarkRunning() 786 current_cpu_datap()->cpu_nthread = nthread; in thread_tell_urgency() 912 if ((uint32_t)cpu == current_cpu_datap()->lcpu.cpu_num) { in pmReSyncDeadlines() 1026 cpu_data_t *my_cpu = current_cpu_datap(); in machine_track_platform_idle()
|
| H A D | thread.h | 212 stack_depth = current_cpu_datap()->cpu_kernel_stack in current_kernel_stack_depth() 219 "depth limit: 0x%016lx", current_cpu_datap()->cpu_kernel_stack, in current_kernel_stack_depth()
|
| H A D | machine_routines.c | 432 *pidlep = (current_cpu_datap()->lcpu.package->num_idle == topoParms.nLThreadsPerPackage); in ml_get_power_state() 1073 current_cpu_datap()->cpu_ldt == KERNEL_LDT) { in ml_cpu_set_ldt() 1078 current_cpu_datap()->cpu_ldt = selector; in ml_cpu_set_ldt() 1090 return current_cpu_datap()->cpu_int_event_time; in ml_cpu_int_event_time() 1099 return local - (current_cpu_datap()->cpu_int_stack_top - INTSTACK_SIZE); in ml_stack_remaining() 1113 return current_cpu_datap()->cpu_int_stack_top - INTSTACK_SIZE; in ml_stack_base() 1134 return &current_cpu_datap()->cpu_kcov_data; in current_kcov_data()
|
| H A D | bsd_i386.c | 870 NULL != current_cpu_datap()->cpu_int_state && in find_kern_regs() 871 !(USER_STATE(thread) == current_cpu_datap()->cpu_int_state && in find_kern_regs() 872 current_cpu_datap()->cpu_interrupt_level == 1)) { in find_kern_regs() 873 return current_cpu_datap()->cpu_int_state; in find_kern_regs() 884 return current_cpu_datap()->cpu_int_stack_top; in dtrace_get_cpu_int_stack_top()
|
| H A D | cpu_data.h | 456 current_cpu_datap(void) in current_cpu_datap() function 550 cpu_data_t *cdata = current_cpu_datap(); in pltrace_internal() 588 cdata = current_cpu_datap(); in iotrace() 620 cdata = current_cpu_datap(); in traptrace_start()
|
| H A D | Diagnostics.c | 262 pkes.pkg_idle_exits = current_cpu_datap()->lcpu.package->package_idle_exits; in diagCall64() 357 cpu_data_t *cdp = current_cpu_datap(); in cpu_powerstats() 408 cpu_data_t *cdp = current_cpu_datap(); in cpu_pmc_control()
|
| H A D | lapic_native.c | 235 current_cpu_datap()->cpu_soft_apic_lvt_timer = lo; in x2apic_init() 247 return current_cpu_datap()->cpu_soft_apic_lvt_timer; in x2apic_read() 257 current_cpu_datap()->cpu_soft_apic_lvt_timer = value; in x2apic_write() 366 current_cpu_datap()->cpu_soft_apic_lvt_timer = lo; in lapic_reinit() 443 current_cpu_datap()->cpu_phys_number = cpu_to_lapic[0]; in lapic_init()
|
| H A D | hibernate_i386.c | 267 if (current_cpu_datap()->cpu_hibernate) { in hibernate_vm_lock() 277 if (current_cpu_datap()->cpu_hibernate) { in hibernate_vm_unlock()
|
| H A D | machine_check.c | 246 mca_save_state(current_cpu_datap()->cpu_mca_state); in mca_check_save() 309 mca_state_t *mca_state = current_cpu_datap()->cpu_mca_state; in mca_dump()
|
| H A D | mp_native.c | 104 volatile int *my_word = &current_cpu_datap()->cpu_signals; in handle_pending_TLB_flushes()
|
| H A D | cpu_threads.h | 55 #define x86_lcpu() (&current_cpu_datap()->lcpu)
|
| H A D | cpu_topology.c | 311 cachep = current_cpu_datap()->lcpu.caches[level - 1]; in ml_cpu_cache_size() 326 cachep = current_cpu_datap()->lcpu.caches[level - 1]; in ml_cpu_cache_sharing()
|
| /xnu-8019.80.24/osfmk/i386/vmx/ |
| H A D | vmx_cpu.c | 128 vmx_specs_t *specs = &current_cpu_datap()->cpu_vmx.specs; in vmx_cpu_init() 169 vmx_cpu_t *cpu = &current_cpu_datap()->cpu_vmx; in vmx_on() 209 vmx_cpu_t *cpu = &current_cpu_datap()->cpu_vmx; in vmx_off() 391 vmx_cpu_t *cpu = &current_cpu_datap()->cpu_vmx; in vmx_resume()
|
| /xnu-8019.80.24/osfmk/kern/ |
| H A D | kpc.h | 63 #define FIXED_RELOAD(ctr) (current_cpu_datap()->cpu_kpc_reload[(ctr)]) 65 #define CONFIGURABLE_RELOAD(ctr) (current_cpu_datap()->cpu_kpc_reload[(ctr) + kpc_fi… 69 #define FIXED_SHADOW(ctr) (current_cpu_datap()->cpu_kpc_shadow[(ctr)]) 71 #define CONFIGURABLE_SHADOW(ctr) (current_cpu_datap()->cpu_kpc_shadow[(ctr) + kpc_fi…
|
| H A D | kpc_thread.c | 129 cpu = current_cpu_datap(); in kpc_update_thread_counters()
|
| H A D | hv_support_kext.c | 263 return current_cpu_datap()->cpu_pending_ast != 0; in hv_ast_pending()
|
| /xnu-8019.80.24/osfmk/arm/ |
| H A D | model_dep.c | 346 cpu_data_t *current_cpu_datap = cpu_datap(cpu); in panic_display_last_pc_lr() local 348 if (current_cpu_datap == NULL) { in panic_display_last_pc_lr() 352 if (current_cpu_datap == getCpuDatap()) { in panic_display_last_pc_lr() 362 current_cpu_datap->ipi_pc, (uint64_t)VM_KERNEL_STRIP_PTR(current_cpu_datap->ipi_lr), in panic_display_last_pc_lr() 363 (uint64_t)VM_KERNEL_STRIP_PTR(current_cpu_datap->ipi_fp)); in panic_display_last_pc_lr() 1097 current_cpu_datap()->ipi_pc = (uint64_t)get_saved_state_pc(regs); in DebuggerXCall() 1098 current_cpu_datap()->ipi_lr = (uint64_t)get_saved_state_lr(regs); in DebuggerXCall() 1099 current_cpu_datap()->ipi_fp = (uint64_t)get_saved_state_fp(regs); in DebuggerXCall()
|
| H A D | cpu_data.h | 124 #define current_cpu_datap() getCpuDatap() macro
|
| /xnu-8019.80.24/osfmk/kdp/ml/x86_64/ |
| H A D | kdp_machdep.c | 434 if (current_cpu_datap()->cpu_fatal_trap_state) { in kdp_i386_trap() 435 current_cpu_datap()->cpu_post_fatal_trap_state = saved_state; in kdp_i386_trap() 436 saved_state = current_cpu_datap()->cpu_fatal_trap_state; in kdp_i386_trap()
|
| H A D | kdp_vm.c | 111 x86_saved_state64_t *cpstate = current_cpu_datap()->cpu_fatal_trap_state; in kern_collectth_state()
|