Lines matching refs:gpte (guest page-table entry handling in KVM's paging template; FNAME() expands once per guest paging mode)
100 static gfn_t gpte_to_gfn_lvl(pt_element_t gpte, int lvl) in gpte_to_gfn_lvl() argument
102 return (gpte & PT_LVL_ADDR_MASK(lvl)) >> PAGE_SHIFT; in gpte_to_gfn_lvl()
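For context, the helper at lines 100-102 masks a guest PTE (gpte) down to the physical-address bits valid for its paging level and shifts by PAGE_SHIFT to get a guest frame number. Below is a minimal standalone sketch of the same idea; the constants and the LVL_SHIFT() helper are illustrative stand-ins for the kernel's PT_LVL_ADDR_MASK(lvl), not its actual definitions.

#include <assert.h>
#include <stdint.h>

/* Illustrative stand-ins for the kernel's address-mask macros (x86-64,
 * 4-level paging): bits 51:12 hold the address, each level adds 9 bits. */
#define PAGE_SHIFT      12
#define LVL_SHIFT(lvl)  (PAGE_SHIFT + 9 * ((lvl) - 1))     /* 4K, 2M, 1G */
#define GPTE_ADDR_BITS  0x000ffffffffff000ull

/* Sketch: recover the guest frame number a gpte maps at a given level. */
static uint64_t gpte_to_gfn_lvl(uint64_t gpte, int lvl)
{
    uint64_t mask = GPTE_ADDR_BITS & ~((1ull << LVL_SHIFT(lvl)) - 1);
    return (gpte & mask) >> PAGE_SHIFT;
}

int main(void)
{
    /* 4K leaf: only the low 12 flag bits are masked off before the shift. */
    assert(gpte_to_gfn_lvl(0x123456789067ull, 1) == 0x123456789ull);
    /* 2M leaf: the low 21 address bits are ignored as well. */
    assert(gpte_to_gfn_lvl(0x123456789067ull, 2) == 0x123456600ull);
    return 0;
}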
106 unsigned gpte) in FNAME()
118 mask |= (gpte >> (PT_GUEST_DIRTY_SHIFT - PT_WRITABLE_SHIFT)) & in FNAME()
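The shift at line 118 is a bit-relocation trick: it moves the guest Dirty bit into the Writable bit position, so the mask being built keeps write permission only when the gpte is already dirty (a clean gpte is shadowed read-only, letting the first write fault so KVM can set the Dirty bit). A hedged, self-contained illustration of just that trick, using the non-EPT x86 bit positions; the surrounding protect_clean_gpte() logic is not reproduced and the helper name is hypothetical.

#include <assert.h>
#include <stdint.h>

/* x86 PTE bit positions for the non-EPT layout: bit 1 = R/W, bit 6 = Dirty. */
#define PT_WRITABLE_SHIFT   1
#define PT_DIRTY_SHIFT      6
#define PT_WRITABLE_MASK    (1ull << PT_WRITABLE_SHIFT)

/* Hypothetical helper: yields the writable bit iff the gpte is dirty. */
static uint64_t writable_if_dirty(uint64_t gpte)
{
    return (gpte >> (PT_DIRTY_SHIFT - PT_WRITABLE_SHIFT)) & PT_WRITABLE_MASK;
}

int main(void)
{
    assert(writable_if_dirty(1ull << PT_DIRTY_SHIFT) == PT_WRITABLE_MASK);
    assert(writable_if_dirty(0) == 0);
    return 0;
}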
132 static bool FNAME(is_bad_mt_xwr)(struct rsvd_bits_validate *rsvd_check, u64 gpte) in FNAME()
137 return __is_bad_mt_xwr(rsvd_check, gpte); in FNAME()
141 static bool FNAME(is_rsvd_bits_set)(struct kvm_mmu *mmu, u64 gpte, int level) in FNAME()
143 return __is_rsvd_bits_set(&mmu->guest_rsvd_check, gpte, level) || in FNAME()
144 FNAME(is_bad_mt_xwr)(&mmu->guest_rsvd_check, gpte); in FNAME()
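Lines 141-144 combine two validity tests: the generic per-level reserved-bit check and, for EPT, the memtype/XWR check. A rough sketch of the first half, with the per-level masks supplied by the caller rather than precomputed per vCPU as the kernel does (names here are illustrative):

#include <stdbool.h>
#include <stdint.h>

/* Illustrative stand-in for the kernel's rsvd_bits_validate state: one mask
 * of must-be-zero bits per paging level, derived in the kernel from
 * MAXPHYADDR, EFER.NXE, CR4 and the paging mode. */
struct rsvd_check_sketch {
    uint64_t rsvd_bits[5];              /* indexed by level, 1..4 */
};

bool gpte_rsvd_bits_set(const struct rsvd_check_sketch *rc,
                        uint64_t gpte, int level)
{
    return (gpte & rc->rsvd_bits[level]) != 0;
}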
188 u64 gpte) in FNAME()
190 if (!FNAME(is_present_gpte)(gpte)) in FNAME()
195 !(gpte & PT_GUEST_ACCESSED_MASK)) in FNAME()
198 if (FNAME(is_rsvd_bits_set)(vcpu->arch.mmu, gpte, PG_LEVEL_4K)) in FNAME()
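Lines 188-198 show the filter applied before prefetching a gpte into the shadow page tables: skip entries that are not present, whose Accessed bit is still clear (prefetching those would bypass the Accessed-bit update the guest expects; the listing truncates the condition at line 195 that limits this test to configurations where A/D bits are tracked), or that set reserved bits. A simplified, self-contained sketch of that predicate; the function name and the caller-supplied reserved-bit mask are assumptions, and the real code also drops the existing spte, which the sketch omits.

#include <stdbool.h>
#include <stdint.h>

/* Architectural x86 PTE bits used by the sketch. */
#define PTE_PRESENT    (1ull << 0)
#define PTE_ACCESSED   (1ull << 5)

/* Hypothetical predicate: should this gpte be skipped during prefetch? */
bool gpte_invalid_for_prefetch(uint64_t gpte, uint64_t rsvd_mask)
{
    if (!(gpte & PTE_PRESENT))
        return true;
    if (!(gpte & PTE_ACCESSED))     /* let the guest's A-bit update happen */
        return true;
    if (gpte & rsvd_mask)           /* malformed: reserved bits set */
        return true;
    return false;
}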
214 static inline unsigned FNAME(gpte_access)(u64 gpte) in FNAME()
218 access = ((gpte & VMX_EPT_WRITABLE_MASK) ? ACC_WRITE_MASK : 0) | in FNAME()
219 ((gpte & VMX_EPT_EXECUTABLE_MASK) ? ACC_EXEC_MASK : 0) | in FNAME()
220 ((gpte & VMX_EPT_READABLE_MASK) ? ACC_USER_MASK : 0); in FNAME()
224 access = gpte & (PT_WRITABLE_MASK | PT_USER_MASK | PT_PRESENT_MASK); in FNAME()
226 access ^= (gpte >> PT64_NX_SHIFT); in FNAME()
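Lines 214-226 show the two decodings of a gpte's permissions: with EPT the read/write/execute bits map directly onto ACC_USER/ACC_WRITE/ACC_EXEC, while with legacy paging the W/U/P bits are kept in place and the NX bit (bit 63) is shifted down and XORed so that NX=1 clears execute permission. A sketch of the legacy branch; the ACC_* values below are assumptions that mirror the kernel's convention of letting the exec flag share the Present bit's position so the decode stays branch-free.

#include <stdint.h>

/* Illustrative access flags, aligned with the low PTE bits:
 * exec aliases Present (bit 0), write aliases R/W (bit 1), user aliases U/S (bit 2). */
#define ACC_EXEC_MASK    1u
#define ACC_WRITE_MASK   (1u << 1)
#define ACC_USER_MASK    (1u << 2)
#define PT64_NX_SHIFT    63

/* Sketch of the non-EPT decode at lines 224-226. */
unsigned gpte_access_legacy(uint64_t gpte)
{
    unsigned access = gpte & (ACC_WRITE_MASK | ACC_USER_MASK | ACC_EXEC_MASK);
    access ^= (gpte >> PT64_NX_SHIFT);   /* NX=1 flips the exec bit off */
    return access;
}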
294 static inline unsigned FNAME(gpte_pkeys)(struct kvm_vcpu *vcpu, u64 gpte) in FNAME()
298 pte_t pte = {.pte = gpte}; in FNAME()
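The gpte_pkeys() fragment at lines 294-298 wraps the gpte in a host pte_t so it can reuse the existing flag helpers to pull out the protection key. Architecturally the key lives in bits 62:59 of a leaf PTE, so a standalone sketch reduces to a shift and mask (the function name is illustrative):

#include <stdint.h>

#define PTE_PKEY_SHIFT  59
#define PTE_PKEY_MASK   0xfull          /* 4-bit protection key, bits 62:59 */

/* Sketch: extract the protection key encoded in a guest leaf PTE. */
unsigned gpte_pkey(uint64_t gpte)
{
    return (gpte >> PTE_PKEY_SHIFT) & PTE_PKEY_MASK;
}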
529 u64 *spte, pt_element_t gpte, bool no_dirty_log) in FNAME()
535 if (FNAME(prefetch_invalid_gpte)(vcpu, sp, spte, gpte)) in FNAME()
538 pgprintk("%s: gpte %llx spte %p\n", __func__, (u64)gpte, spte); in FNAME()
540 gfn = gpte_to_gfn(gpte); in FNAME()
541 pte_access = sp->role.access & FNAME(gpte_access)(gpte); in FNAME()
542 FNAME(protect_clean_gpte)(vcpu->arch.mmu, &pte_access, gpte); in FNAME()
562 pt_element_t gpte = *(const pt_element_t *)pte; in FNAME() local
564 FNAME(prefetch_gpte)(vcpu, sp, spte, gpte, false); in FNAME()
930 pt_element_t gpte; in FNAME() local
947 if (kvm_vcpu_read_guest_atomic(vcpu, pte_gpa, &gpte, in FNAME()
951 FNAME(update_pte)(vcpu, sp, sptep, &gpte); in FNAME()
1033 pt_element_t gpte; in FNAME() local
1042 if (kvm_vcpu_read_guest_atomic(vcpu, pte_gpa, &gpte, in FNAME()
1046 if (FNAME(prefetch_invalid_gpte)(vcpu, sp, &sp->spt[i], gpte)) { in FNAME()
1057 gfn = gpte_to_gfn(gpte); in FNAME()
1059 pte_access &= FNAME(gpte_access)(gpte); in FNAME()
1060 FNAME(protect_clean_gpte)(vcpu->arch.mmu, &pte_access, gpte); in FNAME()