Lines Matching refs:pmu_ops
218 config = kvm_x86_ops.pmu_ops->pmc_perf_hw_id(pmc); in reprogram_gp_counter()
270 kvm_x86_ops.pmu_ops->find_fixed_event(idx), in reprogram_fixed_counter()
279 struct kvm_pmc *pmc = kvm_x86_ops.pmu_ops->pmc_idx_to_pmc(pmu, pmc_idx); in reprogram_counter()
301 struct kvm_pmc *pmc = kvm_x86_ops.pmu_ops->pmc_idx_to_pmc(pmu, bit); in kvm_pmu_handle_event()
323 return kvm_x86_ops.pmu_ops->is_valid_rdpmc_ecx(vcpu, idx); in kvm_pmu_is_valid_rdpmc_ecx()
373 pmc = kvm_x86_ops.pmu_ops->rdpmc_ecx_to_pmc(vcpu, idx, &mask); in kvm_pmu_rdpmc()
394 return kvm_x86_ops.pmu_ops->msr_idx_to_pmc(vcpu, msr) || in kvm_pmu_is_valid_msr()
395 kvm_x86_ops.pmu_ops->is_valid_msr(vcpu, msr); in kvm_pmu_is_valid_msr()
401 struct kvm_pmc *pmc = kvm_x86_ops.pmu_ops->msr_idx_to_pmc(vcpu, msr); in kvm_pmu_mark_pmc_in_use()
409 return kvm_x86_ops.pmu_ops->get_msr(vcpu, msr_info); in kvm_pmu_get_msr()
415 return kvm_x86_ops.pmu_ops->set_msr(vcpu, msr_info); in kvm_pmu_set_msr()
424 kvm_x86_ops.pmu_ops->refresh(vcpu); in kvm_pmu_refresh()
432 kvm_x86_ops.pmu_ops->reset(vcpu); in kvm_pmu_reset()
440 kvm_x86_ops.pmu_ops->init(vcpu); in kvm_pmu_init()
472 pmc = kvm_x86_ops.pmu_ops->pmc_idx_to_pmc(pmu, i); in kvm_pmu_cleanup()
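
Every call in this listing dispatches through the kvm_x86_ops.pmu_ops pointer, which the Intel (VMX) and AMD (SVM) PMU code each fill in with their own callback table. Below is a minimal sketch of that table, assembled only from the callbacks referenced above; the authoritative definition is struct kvm_pmu_ops in arch/x86/kvm/pmu.h, and the typedefs, return types, and member ordering shown here are assumptions that vary between kernel versions.

#include <stdbool.h>

/* Stand-ins for kernel fixed-width types so the sketch compiles on its own. */
typedef unsigned long long u64;
typedef unsigned int u32;

/* Incomplete types stand in for the full KVM definitions. */
struct kvm_vcpu;
struct kvm_pmu;
struct kvm_pmc;
struct msr_data;

/* Sketch of the vendor callback table; signatures are approximations. */
struct kvm_pmu_ops {
	/* map a guest counter's event selector to a perf hardware event id */
	unsigned int (*pmc_perf_hw_id)(struct kvm_pmc *pmc);
	/* map a fixed-counter index to its architectural event */
	unsigned int (*find_fixed_event)(int idx);
	/* translate a PMC index, RDPMC ECX value, or MSR into a kvm_pmc */
	struct kvm_pmc *(*pmc_idx_to_pmc)(struct kvm_pmu *pmu, int pmc_idx);
	struct kvm_pmc *(*rdpmc_ecx_to_pmc)(struct kvm_vcpu *vcpu,
					    unsigned int idx, u64 *mask);
	struct kvm_pmc *(*msr_idx_to_pmc)(struct kvm_vcpu *vcpu, u32 msr);
	/* validity checks used before the translations above */
	int (*is_valid_rdpmc_ecx)(struct kvm_vcpu *vcpu, unsigned int idx);
	bool (*is_valid_msr)(struct kvm_vcpu *vcpu, u32 msr);
	/* guest PMU MSR accessors */
	int (*get_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
	int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
	/* vCPU lifecycle hooks */
	void (*refresh)(struct kvm_vcpu *vcpu);
	void (*init)(struct kvm_vcpu *vcpu);
	void (*reset)(struct kvm_vcpu *vcpu);
};

The generic pmu.c code above never touches vendor registers directly; it only resolves counters and MSRs through these callbacks, which is why every reference to pmu_ops in the listing is an indirect call.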