Lines matching refs:svm, from KVM's SVM support header (arch/x86/kvm/svm/svm.h)

void recalc_intercepts(struct vcpu_svm *svm);

static inline struct vmcb *get_host_vmcb(struct vcpu_svm *svm)
{
	if (is_guest_mode(&svm->vcpu))
		return svm->nested.hsave;
	else
		return svm->vmcb;
}

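When the vCPU is in guest mode, svm->vmcb is the nested guest's control block, so host-side intercept state is staged in nested.hsave and folded into the running VMCB by recalc_intercepts(). A minimal illustrative caller, assuming the helpers defined below (INTERCEPT_HLT is a real intercept index; the wrapper name is hypothetical):

/* Hypothetical wrapper: request HLT interception on the host VMCB.
 * If a nested guest is active this lands in nested.hsave, and
 * recalc_intercepts() merges it into the VMCB that actually runs. */
static void example_intercept_hlt(struct vcpu_svm *svm)
{
	svm_set_intercept(svm, INTERCEPT_HLT);
}
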
static inline void set_dr_intercepts(struct vcpu_svm *svm)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	/* ... sets the read and write intercept bits for DR0-DR7 ... */

	recalc_intercepts(svm);
}

static inline void clr_dr_intercepts(struct vcpu_svm *svm)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	/* ... clears all DR read/write intercept bits ... */

	recalc_intercepts(svm);
}

static inline void set_exception_intercept(struct vcpu_svm *svm, u32 bit)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	/* ... marks exception vector 'bit' for interception ... */

	recalc_intercepts(svm);
}

static inline void clr_exception_intercept(struct vcpu_svm *svm, u32 bit)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	/* ... clears the intercept of exception vector 'bit' ... */

	recalc_intercepts(svm);
}

static inline void svm_set_intercept(struct vcpu_svm *svm, int bit)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	vmcb_set_intercept(&vmcb->control, bit);
	recalc_intercepts(svm);
}

static inline void svm_clr_intercept(struct vcpu_svm *svm, int bit)
{
	struct vmcb *vmcb = get_host_vmcb(svm);

	vmcb_clr_intercept(&vmcb->control, bit);
	recalc_intercepts(svm);
}

static inline bool svm_is_intercept(struct vcpu_svm *svm, int bit)
{
	return vmcb_is_intercept(&svm->vmcb->control, bit);
}

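Note the asymmetry: the set/clr helpers edit the host VMCB returned by get_host_vmcb(), while svm_is_intercept() inspects the currently active svm->vmcb. A sketch of typical initialization-time usage, loosely modeled on KVM's vCPU setup (the function name is hypothetical; PF_VECTOR, UD_VECTOR and MC_VECTOR are the standard x86 vector numbers):

/* Illustrative: trap the faults the hypervisor must see itself. */
static void example_init_exception_intercepts(struct vcpu_svm *svm)
{
	set_exception_intercept(svm, PF_VECTOR);	/* #PF: shadow paging */
	set_exception_intercept(svm, UD_VECTOR);	/* #UD: instruction emulation */
	set_exception_intercept(svm, MC_VECTOR);	/* #MC: machine-check handling */
}
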
static inline bool vgif_enabled(struct vcpu_svm *svm)
{
	return !!(svm->vmcb->control.int_ctl & V_GIF_ENABLE_MASK);
}

static inline void enable_gif(struct vcpu_svm *svm)
{
	if (vgif_enabled(svm))
		svm->vmcb->control.int_ctl |= V_GIF_MASK;
	else
		svm->vcpu.arch.hflags |= HF_GIF_MASK;
}

static inline void disable_gif(struct vcpu_svm *svm)
{
	if (vgif_enabled(svm))
		svm->vmcb->control.int_ctl &= ~V_GIF_MASK;
	else
		svm->vcpu.arch.hflags &= ~HF_GIF_MASK;
}

static inline bool gif_set(struct vcpu_svm *svm)
{
	if (vgif_enabled(svm))
		return !!(svm->vmcb->control.int_ctl & V_GIF_MASK);
	else
		return !!(svm->vcpu.arch.hflags & HF_GIF_MASK);
}

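GIF (Global Interrupt Flag) gates all interrupt sources on SVM. With hardware vGIF the flag lives in the VMCB's int_ctl field; otherwise KVM emulates it in the software HF_GIF_MASK hflag, and gif_set() hides the difference. A simplified sketch of how an injection path might consult it (illustrative shape only, not the exact kernel function):

/* Illustrative: NMIs cannot be injected while GIF is clear. */
static bool example_nmi_blocked(struct vcpu_svm *svm)
{
	if (!gif_set(svm))
		return true;
	/* ... further checks (interrupt shadow, NMI already pending, etc.) ... */
	return false;
}
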
void disable_nmi_singlestep(struct vcpu_svm *svm);
void svm_set_gif(struct vcpu_svm *svm, bool value);

static inline bool nested_svm_virtualize_tpr(struct kvm_vcpu *vcpu)
{
	struct vcpu_svm *svm = to_svm(vcpu);

	return is_guest_mode(vcpu) && (svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK);
}

static inline bool nested_exit_on_smi(struct vcpu_svm *svm)
{
	return vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_SMI);
}

static inline bool nested_exit_on_intr(struct vcpu_svm *svm)
{
	return vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_INTR);
}

static inline bool nested_exit_on_nmi(struct vcpu_svm *svm)
{
	return vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_NMI);
}

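These predicates test the intercept bits L1 requested in its VMCB controls (cached in nested.ctl) and decide whether a physical event must be turned into a nested #VMEXIT. A hedged sketch of the decision shape (illustrative; the real logic lives in KVM's event-injection paths):

/* Illustrative: when L2 is running and L1 intercepts INTR, a pending
 * interrupt is delivered by exiting to L1 rather than injecting into L2. */
static bool example_intr_causes_nested_exit(struct kvm_vcpu *vcpu)
{
	struct vcpu_svm *svm = to_svm(vcpu);

	return is_guest_mode(vcpu) && nested_exit_on_intr(svm);
}
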
int enter_svm_guest_mode(struct vcpu_svm *svm, u64 vmcb_gpa,
			 struct vmcb *nested_vmcb);
void svm_free_nested(struct vcpu_svm *svm);
int svm_allocate_nested(struct vcpu_svm *svm);
int nested_svm_vmrun(struct vcpu_svm *svm);
int nested_svm_vmexit(struct vcpu_svm *svm);
int nested_svm_exit_handled(struct vcpu_svm *svm);
int nested_svm_check_permissions(struct vcpu_svm *svm);
int nested_svm_check_exception(struct vcpu_svm *svm, unsigned nr,
			       bool has_error_code, u32 error_code);
int nested_svm_exit_special(struct vcpu_svm *svm);
void sync_nested_vmcb_control(struct vcpu_svm *svm);

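Together these form the nested entry/exit surface: nested_svm_vmrun() enters L2, nested_svm_vmexit() synthesizes a #VMEXIT back to L1, and the exit_handled/exit_special pair classifies exits taken while L2 runs. A hedged sketch of the exit-routing shape, loosely following KVM's exit handling (simplified; the function name and locals are illustrative, while NESTED_EXIT_CONTINUE and NESTED_EXIT_DONE are real constants from this header):

/* Illustrative: while L2 is active, first let L1's intercepts claim
 * the exit; only unclaimed exits are handled by L0 itself. */
static int example_route_exit(struct vcpu_svm *svm)
{
	if (is_guest_mode(&svm->vcpu)) {
		int vmexit = nested_svm_exit_special(svm);

		if (vmexit == NESTED_EXIT_CONTINUE)
			vmexit = nested_svm_exit_handled(svm);
		if (vmexit == NESTED_EXIT_DONE)
			return 1;	/* reflected to L1 as a #VMEXIT */
	}
	return 0;	/* handle in L0 */
}
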
static inline void avic_update_vapic_bar(struct vcpu_svm *svm, u64 data)
{
	svm->vmcb->control.avic_vapic_bar = data & VMCB_AVIC_APIC_BAR_MASK;
	vmcb_mark_dirty(svm->vmcb, VMCB_AVIC);
}

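avic_update_vapic_bar() re-points the AVIC APIC backing address when the guest moves its APIC base. A hedged sketch of the hookup (illustrative; assumes this is driven from MSR_IA32_APICBASE write handling while APICv/AVIC is active):

/* Illustrative: refresh the AVIC APIC-base mapping on a guest write
 * to MSR_IA32_APICBASE when APICv/AVIC is in use. */
static void example_set_apicbase(struct kvm_vcpu *vcpu, u64 data)
{
	if (kvm_vcpu_apicv_active(vcpu))
		avic_update_vapic_bar(to_svm(vcpu), data);
}
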
static inline bool avic_vcpu_is_running(struct kvm_vcpu *vcpu)
{
	struct vcpu_svm *svm = to_svm(vcpu);
	u64 *entry = svm->avic_physical_id_cache;

	/* ... returns whether the cached physical APIC ID table entry
	 * has its IS_RUNNING bit set ... */
}

void avic_init_vmcb(struct vcpu_svm *svm);
int avic_incomplete_ipi_interception(struct vcpu_svm *svm);
int avic_unaccelerated_access_interception(struct vcpu_svm *svm);
int avic_init_vcpu(struct vcpu_svm *svm);

void pre_sev_run(struct vcpu_svm *svm, int cpu);