Home
last modified time | relevance | path

Searched refs: os_atomic_load_wide (Results 1 – 15 of 15) sorted by relevance

/xnu-8020.140.41/osfmk/kern/
H A Dcounter_common.c67 uint64_t current_value = os_atomic_load_wide(zpercpu_get(*counter), relaxed); in scalable_counter_static_init()
151 return os_atomic_load_wide(counter, relaxed); in counter_load()
160 value += os_atomic_load_wide(it, relaxed); in counter_load()
H A Dsched_prim.c148 return os_atomic_load_wide(&SCHED(rt_runq)(pset)->earliest_deadline, relaxed); in rt_runq_earliest_deadline()
221 assert(os_atomic_load_wide(&rt_run_queue->earliest_deadline, relaxed) == earliest_deadline); in check_rt_runq_consistency()
4063 if (earliest && (deadline < os_atomic_load_wide(&rt_run_queue->earliest_deadline, relaxed))) { in rt_runq_enqueue()
6612 uint64_t load_compute_deadline = os_atomic_load_wide(&sched_load_compute_deadline, relaxed); in sched_timeshare_consider_maintenance()
H A Dsched_clutch.c1396 …scb_cpu_data.scbcd_cpu_data_packed = os_atomic_load_wide(&clutch_bucket_group->scbg_cpu_data.scbcd… in sched_clutch_interactivity_from_cpu_data()
1770 …uint64_t bucket_group_run_count = os_atomic_load_wide(&clutch_bucket_group->scbg_blocked_data.scct… in sched_clutch_bucket_group_timeshare_update()
/xnu-8020.140.41/osfmk/vm/
H A Danalytics.c100 e->over_global_limit = os_atomic_load_wide(&vm_add_wire_count_over_global_limit, relaxed); in report_mlock_failures()
101 e->over_user_limit = os_atomic_load_wide(&vm_add_wire_count_over_user_limit, relaxed); in report_mlock_failures()
/xnu-8020.140.41/libkern/os/
H A Datomic_private.h369 #define os_atomic_load_wide(p, m) ({ \ macro
791 #define os_atomic_load_wide(p, m) _os_atomic_error_is_starvable(os_atomic_load_wide) macro
H A Dlog_queue.c159 os_atomic_load_wide(_v, dependency); \
/xnu-8020.140.41/bsd/kern/
H A Dcounter_test.c235 uint64_t value = os_atomic_load_wide(&atomic_counter, relaxed);
H A Dkern_exec.c5757 local_experiment_factors = os_atomic_load_wide(&libmalloc_experiment_factors, relaxed); in exec_add_apple_strings()
7262 uint64_t value = os_atomic_load_wide(&libmalloc_experiment_factors, relaxed);
H A Dkern_sysctl.c3907 old_value = os_atomic_load_wide(ptr, relaxed); in sysctl_r_64bit_atomic()
H A Dkern_event.c8882 buf[nknotes] = os_atomic_load_wide(&kn->kn_udata, relaxed); in klist_copy_udata()
/xnu-8020.140.41/osfmk/arm/
H A Dmachine_routines_common.c369 return os_atomic_load_wide(&perfcontrol_callout_stats[type][stat], relaxed) / in perfcontrol_callout_stat_avg()
370 os_atomic_load_wide(&perfcontrol_callout_count[type], relaxed); in perfcontrol_callout_stat_avg()
912 uint64_t const old_duration = os_atomic_load_wide(&thread->machine.int_time_mt, relaxed); in __ml_handle_interrupts_disabled_duration()
/xnu-8020.140.41/bsd/dev/
H A Dmonotonic.c361 uint64_t value = os_atomic_load_wide(&mt_retrograde, relaxed);
/xnu-8020.140.41/osfmk/arm/commpage/
H A Dcommpage.c759 uint64_t saved_data = os_atomic_load_wide(approx_time_base, relaxed); in commpage_update_mach_approximate_time()
/xnu-8020.140.41/doc/
H A Datomics.md165 compile to a plain load or store. `os_atomic_load_wide` and
/xnu-8020.140.41/bsd/pthread/
H A Dpthread_workqueue.c322 return os_atomic_load_wide(&wq->wq_thactive, relaxed); in _wq_thactive()
1823 uint64_t lastblocked_ts = os_atomic_load_wide(lastblocked_tsp, relaxed); in workq_thread_is_busy()