Searched refs:zpercpu_get (Results 1 – 9 of 9) sorted by relevance
68 (*zpercpu_get(*counter)) += amount; in counter_add()
77 (*zpercpu_get(*counter))++; in counter_inc()
86 (*zpercpu_get(*counter))--; in counter_dec()
94 (*zpercpu_get(*counter)) += amount; in counter_add_preemption_disabled()
101 (*zpercpu_get(*counter))++; in counter_inc_preemption_disabled()
108 (*zpercpu_get(*counter))--; in counter_dec_preemption_disabled()
40 os_atomic_add(zpercpu_get(*counter), amount, relaxed); in counter_add()
47 os_atomic_inc(zpercpu_get(*counter), relaxed); in counter_inc()
54 os_atomic_dec(zpercpu_get(*counter), relaxed); in counter_dec()
67 uint64_t current_value = os_atomic_load_wide(zpercpu_get(*counter), relaxed); in scalable_counter_static_init()
73 os_atomic_store_wide(zpercpu_get(*counter), current_value, relaxed); in scalable_counter_static_init()
123 return zpercpu_get(smr_pcpu(smr))->c_rd_seq != SMR_SEQ_INVALID; in smr_entered_nopreempt()
168 __smr_enter(smr, zpercpu_get(smr_pcpu(smr))); in smr_enter()
175 smr_pcpu_t pcpu = zpercpu_get(smr_pcpu(smr)); in smr_leave()
404 zpercpu_get(zstats)->zs_mem_allocated += rounded_size; in gzalloc_alloc()
541 zpercpu_get(zstats)->zs_mem_freed += rounded_size; in gzalloc_free()
1060 #define zpercpu_get(ptr) \ macro
2101 zone_stats_t zs = zpercpu_get(zone->z_stats); in zone_meta_find_and_clear_bit()
5847 zone_cache_t cache = zpercpu_get(zone->z_pcpu_cache); in zfree_cached()
5963 zpercpu_get(zstats)->zs_mem_freed += elem_size; in zfree_ext()
6299 zpercpu_get(zstats)->zs_mem_allocated += esize; in zalloc_item_fast()
6439 zpercpu_get(zstats)->zs_mem_allocated += esize; in zalloc_cached_fast()
6668 cache = zpercpu_get(zone->z_pcpu_cache); in zalloc_cached()
7060 zpercpu_get(zone->z_stats)->zs_mem_allocated += size; in _zalloc_permanent()
1502 counter = zpercpu_get(btlog->btl_sample); in btlog_sample()
73 return zpercpu_get(intr_samplev); in kperf_intr_sample_buffer()