Home
last modified time | relevance | path

Searched refs: os_atomic_load_wide (Results 1 – 17 of 17) sorted by relevance

/xnu-10063.141.1/libsyscall/mach/
H A Dvm_reclaim.c136 idx = os_atomic_load_wide(&indices->tail, relaxed); in mach_vm_reclaim_mark_free()
137 head = os_atomic_load_wide(&indices->head, relaxed); in mach_vm_reclaim_mark_free()
146 head = os_atomic_load_wide(&indices->head, relaxed); in mach_vm_reclaim_mark_free()
177 head = os_atomic_load_wide(&indices->head, relaxed); in mach_vm_reclaim_mark_used()
188 original_tail = os_atomic_load_wide(&indices->tail, relaxed); in mach_vm_reclaim_mark_used()
193 busy = os_atomic_load_wide(&indices->busy, relaxed); in mach_vm_reclaim_mark_used()
235 uint64_t busy = os_atomic_load_wide(&indices->busy, relaxed); in mach_vm_reclaim_is_available()
254 uint64_t head = os_atomic_load_wide(&indices->head, relaxed); in mach_vm_reclaim_is_reclaimed()
/xnu-10063.141.1/osfmk/kern/
H A Dcounter_common.c67 uint64_t current_value = os_atomic_load_wide(zpercpu_get(*counter), relaxed); in scalable_counter_static_init()
151 return os_atomic_load_wide(counter, relaxed); in counter_load()
160 value += os_atomic_load_wide(it, relaxed); in counter_load()
H A Drecount.c1187 __assert_only uint64_t state_time = os_atomic_load_wide( in recount_processor_idle()
1218 uint64_t state = os_atomic_load_wide(&pr->rpr_state_last_abs_time, relaxed); in recount_processor_run()
1243 uint64_t idle_stamp = os_atomic_load_wide(&pr->rpr_state_last_abs_time, in recount_processor_usage()
H A Dsched_prim.c153 return os_atomic_load_wide(&SCHED(rt_runq)(pset)->earliest_deadline, relaxed); in rt_runq_earliest_deadline()
226 assert(os_atomic_load_wide(&rt_run_queue->earliest_deadline, relaxed) == earliest_deadline); in check_rt_runq_consistency()
4297 if (earliest && (deadline < os_atomic_load_wide(&rt_run_queue->earliest_deadline, relaxed))) { in rt_runq_enqueue()
7012 uint64_t load_compute_deadline = os_atomic_load_wide(&sched_load_compute_deadline, relaxed); in sched_timeshare_consider_maintenance()
H A Dsched_clutch.c1423 …scb_cpu_data.scbcd_cpu_data_packed = os_atomic_load_wide(&clutch_bucket_group->scbg_cpu_data.scbcd… in sched_clutch_interactivity_from_cpu_data()
1797 …uint64_t bucket_group_run_count = os_atomic_load_wide(&clutch_bucket_group->scbg_blocked_data.scct… in sched_clutch_bucket_group_timeshare_update()
/xnu-10063.141.1/osfmk/vm/
H A Danalytics.c105 e->over_global_limit = os_atomic_load_wide(&vm_add_wire_count_over_global_limit, relaxed); in report_mlock_failures()
106 e->over_user_limit = os_atomic_load_wide(&vm_add_wire_count_over_user_limit, relaxed); in report_mlock_failures()
/xnu-10063.141.1/bsd/kern/
H A Dcounter_test.c235 uint64_t value = os_atomic_load_wide(&atomic_counter, relaxed);
H A Dkern_exec.c6407 local_experiment_factors = os_atomic_load_wide(&libmalloc_experiment_factors, relaxed); in exec_add_apple_strings()
8043 uint64_t value = os_atomic_load_wide(&libmalloc_experiment_factors, relaxed);
H A Dkern_sysctl.c4278 old_value = os_atomic_load_wide(ptr, relaxed);
H A Dkern_event.c9184 buf[nknotes] = os_atomic_load_wide(&kn->kn_udata, relaxed); in klist_copy_udata()
/xnu-10063.141.1/libkern/os/
H A Datomic_private.h336 #define os_atomic_load_wide(p, m) ({ \ macro
H A Dlog_queue.c159 os_atomic_load_wide(_v, dependency); \
/xnu-10063.141.1/osfmk/arm/
H A Dmachine_routines_common.c369 return os_atomic_load_wide(&perfcontrol_callout_stats[type][stat], relaxed) / in perfcontrol_callout_stat_avg()
370 os_atomic_load_wide(&perfcontrol_callout_count[type], relaxed); in perfcontrol_callout_stat_avg()
948 uint64_t const old_duration = os_atomic_load_wide(&thread->machine.int_time_mt, relaxed); in __ml_handle_interrupts_disabled_duration()
/xnu-10063.141.1/bsd/dev/
H A Dmonotonic.c361 uint64_t value = os_atomic_load_wide(&mt_retrograde, relaxed);
/xnu-10063.141.1/osfmk/arm/commpage/
H A Dcommpage.c869 uint64_t saved_data = os_atomic_load_wide(approx_time_base, relaxed); in commpage_update_mach_approximate_time()
/xnu-10063.141.1/doc/primitives/
H A Datomics.md167 compile to a plain load or store. `os_atomic_load_wide` and
/xnu-10063.141.1/bsd/pthread/
H A Dpthread_workqueue.c322 return os_atomic_load_wide(&wq->wq_thactive, relaxed); in _wq_thactive()
1843 uint64_t lastblocked_ts = os_atomic_load_wide(lastblocked_tsp, relaxed); in workq_thread_is_busy()