Searched refs:kpc_fixed_count (Results 1 – 5 of 5) sorted by relevance
 84   if (!mt_core_supported || ctr >= kpc_fixed_count()) {   in mt_core_snap()
106   if (!mt_core_supported || ctr >= kpc_fixed_count()) {   in mt_core_set_snap()
153   for (uint32_t i = 0; i < kpc_fixed_count(); i++) {   in mt_fixed_counter_set_ctrl_mask()
192   int fixed_count = kpc_fixed_count();   in enable_counters()
234   for (uint32_t i = 0; i < kpc_fixed_count(); i++) {   in core_up()
277   for (uint32_t i = 0; i < kpc_fixed_count(); i++) {   in mt_check_for_pmi()
336   for (uint32_t i = 0; i < kpc_fixed_count(); i++) {   in mt_microstackshot_start_remote()
128   kpc_fixed_count(void)   in kpc_fixed_count() function
235   for (i = 0; i < kpc_fixed_count(); i++) {   in set_running_fixed()
249   for (i = 0; i < kpc_fixed_count(); i++) {   in set_running_fixed()
664   for (ctr = 0; ctr < kpc_fixed_count(); ctr++) {   in kpc_pmi_handler()
697   kpc_sample_kperf_x86(ctr + kpc_fixed_count(), actionid,   in kpc_pmi_handler()
 60   #define CONFIGURABLE_ACTIONID(ctr) (kpc_actionid[(ctr) + kpc_fixed_count()])
 65   #define CONFIGURABLE_RELOAD(ctr) (current_cpu_datap()->cpu_kpc_reload[(ctr) + kpc_fixed_count()])
 66   #define CONFIGURABLE_RELOAD_CPU(cpu, ctr) (cpu_datap(cpu)->cpu_kpc_reload[(ctr) + kpc_fixed_count()])
 71   #define CONFIGURABLE_SHADOW(ctr) (current_cpu_datap()->cpu_kpc_shadow[(ctr) + kpc_fixed_count()])
 72   #define CONFIGURABLE_SHADOW_CPU(cpu, ctr) (cpu_datap(cpu)->cpu_kpc_shadow[(ctr) + kpc_fixed_count()])
296   uint32_t kpc_fixed_count(void);
254   assert(ctr < (kpc_fixed_count() + kpc_configurable_count()));   in kpc_controls_counter()
256   if (ctr < kpc_fixed_count()) {   in kpc_controls_counter()
266   pmc_mask = (1ULL << (ctr - kpc_fixed_count()));   in kpc_controls_counter()
416   count += kpc_fixed_count();   in kpc_get_counter_count()
 96   for (int i = 0; i < (int) kpc_fixed_count(); i++) {   in mt_mtc_update_fixed_counts()