Lines matching "pmc" (full-text search), from KVM's AMD vPMU code (arch/x86/kvm/svm/pmu.c):

static unsigned int amd_pmc_perf_hw_id(struct kvm_pmc *pmc)
{
        ...
        u8 event_select = pmc->eventsel & ARCH_PERFMON_EVENTSEL_EVENT;
        u8 unit_mask = (pmc->eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8;
        ...
        /* family 17h+ (Zen) uses its own event mapping table */
        if (guest_cpuid_family(pmc->vcpu) >= 0x17)
                ...
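For reference, the two fields decoded above sit in the low 16 bits of the event-select MSR: bits 7:0 hold the event select and bits 15:8 the unit mask. A minimal standalone sketch of the same decoding (the mask values mirror the kernel's ARCH_PERFMON_EVENTSEL_* definitions; the sample eventsel value is made up):

#include <stdint.h>
#include <stdio.h>

/* Field masks for the event-select MSR layout, as in the kernel headers. */
#define ARCH_PERFMON_EVENTSEL_EVENT 0x000000FFULL
#define ARCH_PERFMON_EVENTSEL_UMASK 0x0000FF00ULL

int main(void)
{
        uint64_t eventsel = 0x5300c0;   /* made-up raw EVNTSEL value */
        uint8_t event_select = eventsel & ARCH_PERFMON_EVENTSEL_EVENT;
        uint8_t unit_mask = (eventsel & ARCH_PERFMON_EVENTSEL_UMASK) >> 8;

        printf("event=0x%02x umask=0x%02x\n", event_select, unit_mask);
        return 0;
}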
/* check if a PMC is enabled by comparing it against global_ctrl bits. Because
 * AMD CPU doesn't have global_ctrl MSR, all PMCs are enabled (return TRUE).
 */
static bool amd_pmc_is_enabled(struct kvm_pmc *pmc)
{
        return true;
}
static bool amd_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr)
{
        /* All MSRs refer to exactly one PMC, so msr_idx_to_pmc is enough. */
        ...
static struct kvm_pmc *amd_msr_idx_to_pmc(struct kvm_vcpu *vcpu, u32 msr)
{
        struct kvm_pmu *pmu = vcpu_to_pmu(vcpu);
        struct kvm_pmc *pmc;

        /* try the counter MSRs first, then fall back to the event selects */
        pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER);
        pmc = pmc ? pmc : get_gp_pmc_amd(pmu, msr, PMU_TYPE_EVNTSEL);

        return pmc;
}
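In the legacy AMD layout the four event-select MSRs start at 0xc0010000 (MSR_K7_EVNTSEL0) and the four counters at 0xc0010004 (MSR_K7_PERFCTR0), so the resolution is base-plus-offset arithmetic. A standalone sketch of that mapping, assuming the legacy layout only (msr_to_idx is a made-up helper, not the kernel's get_gp_pmc_amd):

#include <stdint.h>
#include <stdio.h>

#define MSR_K7_EVNTSEL0 0xc0010000u     /* four event selects ... */
#define MSR_K7_PERFCTR0 0xc0010004u     /* ... then four counters */
#define NUM_COUNTERS    4

enum pmu_type { PMU_TYPE_COUNTER, PMU_TYPE_EVNTSEL };

/* Return the counter index `msr` addresses, or -1 if it is not a
 * PMC MSR of the requested type. */
static int msr_to_idx(uint32_t msr, enum pmu_type type)
{
        uint32_t base = (type == PMU_TYPE_COUNTER) ? MSR_K7_PERFCTR0
                                                   : MSR_K7_EVNTSEL0;

        if (msr >= base && msr < base + NUM_COUNTERS)
                return (int)(msr - base);
        return -1;
}

int main(void)
{
        uint32_t msr = 0xc0010005;      /* MSR_K7_PERFCTR1 */
        int idx = msr_to_idx(msr, PMU_TYPE_COUNTER);

        if (idx < 0)    /* same fall-through as the ?: chain above */
                idx = msr_to_idx(msr, PMU_TYPE_EVNTSEL);
        printf("pmc index: %d\n", idx);
        return 0;
}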
static int amd_pmu_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
{
        struct kvm_pmc *pmc;
        ...
        /* MSR_PERFCTRn */
        pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER);
        if (pmc) {
                msr_info->data = pmc_read_counter(pmc);
                return 0;
        }
        /* MSR_EVNTSELn */
        pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_EVNTSEL);
        if (pmc) {
                msr_info->data = pmc->eventsel;
                return 0;
        }
        ...
static int amd_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
{
        struct kvm_pmc *pmc;
        ...
        /* MSR_PERFCTRn */
        pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER);
        if (pmc) {
                pmc->counter += data - pmc_read_counter(pmc);
                return 0;
        }
        /* MSR_EVNTSELn */
        pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_EVNTSEL);
        if (pmc) {
                /* reprogram only if the event selection actually changed */
                if (data != pmc->eventsel)
                        reprogram_gp_counter(pmc, data);
                return 0;
        }
        ...
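Note the counter write above: rather than storing `data` directly, it adjusts the saved base by `data - pmc_read_counter(pmc)`, because the value a guest reads is that base plus whatever the backing host perf event has counted meanwhile. A toy model of the arithmetic (struct toy_pmc and both helpers are invented for illustration; in KVM the running delta comes from the host perf subsystem):

#include <stdint.h>
#include <stdio.h>

/* Toy model: the architected counter value is a stored base plus a live
 * delta accumulated by the backing host perf event. */
struct toy_pmc {
        uint64_t counter;       /* stored base */
        uint64_t perf_delta;    /* ticks counted by the host event */
};

static uint64_t toy_read_counter(struct toy_pmc *pmc)
{
        return pmc->counter + pmc->perf_delta;
}

/* Guest WRMSR to the counter: shift the base so a subsequent read
 * returns `data`, without resetting the still-running perf event. */
static void toy_write_counter(struct toy_pmc *pmc, uint64_t data)
{
        pmc->counter += data - toy_read_counter(pmc);
}

int main(void)
{
        struct toy_pmc pmc = { .counter = 100, .perf_delta = 40 };

        toy_write_counter(&pmc, 1000);
        printf("%llu\n", (unsigned long long)toy_read_counter(&pmc)); /* 1000 */
        pmc.perf_delta += 5;    /* the event keeps counting */
        printf("%llu\n", (unsigned long long)toy_read_counter(&pmc)); /* 1005 */
        return 0;
}

Since the addition wraps modulo 2^64, the same adjustment works when the guest writes a value smaller than the current reading.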
static void amd_pmu_reset(struct kvm_vcpu *vcpu)
{
        ...
        /* for each general-purpose counter: */
        struct kvm_pmc *pmc = &pmu->gp_counters[i];

        pmc_stop_counter(pmc);  /* release the backing perf event */
        pmc->counter = pmc->eventsel = 0;
        ...