Lines Matching refs:vgpu

(Identifier cross-reference for vgpu in the i915 GVT-g kvmgt source: each entry gives the file's line number, the matching code fragment, and the enclosing function; "argument", "local", and "member" mark how vgpu is used at that line.)

66 	size_t (*rw)(struct intel_vgpu *vgpu, char *buf,
68 void (*release)(struct intel_vgpu *vgpu,
93 struct intel_vgpu *vgpu; member
102 struct intel_vgpu *vgpu; member
112 struct intel_vgpu *vgpu; member
137 static inline struct kvmgt_vdev *kvmgt_vdev(struct intel_vgpu *vgpu) in kvmgt_vdev() argument
139 return intel_vgpu_vdev(vgpu); in kvmgt_vdev()
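
The kvmgt_vdev() helper at lines 137-139 is just a typed accessor for the kvmgt-private state hung off struct intel_vgpu. A minimal sketch of the pattern, assuming the struct layout implied by the fragments rather than copying the source:

struct kvmgt_vdev;      /* kvmgt-private; the full definition lives in this file */

/* Core GVT keeps the hypervisor-specific state as an opaque pointer... */
static inline void *intel_vgpu_vdev(struct intel_vgpu *vgpu)
{
        return vgpu->vdev;      /* assumed: a void * member set at attach time */
}

/* ...and only kvmgt narrows it back to its real type. */
static inline struct kvmgt_vdev *kvmgt_vdev(struct intel_vgpu *vgpu)
{
        return intel_vgpu_vdev(vgpu);
}

The pointer itself is allocated in kvmgt_attach_vgpu() (lines 1869-1876 below) and freed again in kvmgt_detach_vgpu().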
151 static void gvt_unpin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_unpin_guest_page() argument
154 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gvt_unpin_guest_page()
155 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in gvt_unpin_guest_page()
171 static int gvt_pin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_pin_guest_page() argument
174 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in gvt_pin_guest_page()
217 gvt_unpin_guest_page(vgpu, gfn, npage * PAGE_SIZE); in gvt_pin_guest_page()
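
gvt_pin_guest_page() and gvt_unpin_guest_page() (lines 151-217) pin a guest frame range through VFIO one gfn at a time, insisting that the resulting host pages are physically contiguous; on failure, the "npage * PAGE_SIZE" rollback visible at line 217 unpins whatever was pinned so far. A hedged sketch of the pair, assuming the vfio_pin_pages()/vfio_unpin_pages() API of this kernel generation and eliding error logging:

static void gvt_unpin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
                                 unsigned long size)
{
        struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu);
        int total_pages = DIV_ROUND_UP(size, PAGE_SIZE);
        int npage;

        for (npage = 0; npage < total_pages; npage++) {
                unsigned long cur_gfn = gfn + npage;

                vfio_unpin_pages(mdev_dev(vdev->mdev), &cur_gfn, 1);
        }
}

static int gvt_pin_guest_page(struct intel_vgpu *vgpu, unsigned long gfn,
                              unsigned long size, struct page **page)
{
        struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu);
        int total_pages = DIV_ROUND_UP(size, PAGE_SIZE);
        unsigned long base_pfn = 0;
        int npage, ret;

        for (npage = 0; npage < total_pages; npage++) {
                unsigned long cur_gfn = gfn + npage;
                unsigned long pfn;

                ret = vfio_pin_pages(mdev_dev(vdev->mdev), &cur_gfn, 1,
                                     IOMMU_READ | IOMMU_WRITE, &pfn);
                if (ret != 1) {
                        if (ret >= 0)
                                ret = -EFAULT;
                        goto err;
                }
                if (npage == 0) {
                        base_pfn = pfn;
                } else if (pfn != base_pfn + npage) {
                        /* not contiguous: unpin this page too, then bail */
                        ret = -EINVAL;
                        npage++;
                        goto err;
                }
        }

        *page = pfn_to_page(base_pfn);
        return 0;

err:
        gvt_unpin_guest_page(vgpu, gfn, npage * PAGE_SIZE);
        return ret;
}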
221 static int gvt_dma_map_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_dma_map_page() argument
224 struct device *dev = &vgpu->gvt->gt->i915->drm.pdev->dev; in gvt_dma_map_page()
228 ret = gvt_pin_guest_page(vgpu, gfn, size, &page); in gvt_dma_map_page()
237 gvt_unpin_guest_page(vgpu, gfn, size); in gvt_dma_map_page()
244 static void gvt_dma_unmap_page(struct intel_vgpu *vgpu, unsigned long gfn, in gvt_dma_unmap_page() argument
247 struct device *dev = &vgpu->gvt->gt->i915->drm.pdev->dev; in gvt_dma_unmap_page()
250 gvt_unpin_guest_page(vgpu, gfn, size); in gvt_dma_unmap_page()
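
gvt_dma_map_page()/gvt_dma_unmap_page() (lines 221-250) layer the DMA API on top of that pin: pin the range, map the first page for the whole (contiguous) extent, and unpin again if the mapping fails. Roughly:

static int gvt_dma_map_page(struct intel_vgpu *vgpu, unsigned long gfn,
                            dma_addr_t *dma_addr, unsigned long size)
{
        struct device *dev = &vgpu->gvt->gt->i915->drm.pdev->dev;
        struct page *page = NULL;
        int ret;

        ret = gvt_pin_guest_page(vgpu, gfn, size, &page);
        if (ret)
                return ret;

        /* The pinned range is physically contiguous, so one mapping of
         * 'size' bytes starting at the first page covers all of it. */
        *dma_addr = dma_map_page(dev, page, 0, size, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(dev, *dma_addr)) {
                gvt_unpin_guest_page(vgpu, gfn, size);
                return -ENOMEM;
        }
        return 0;
}

static void gvt_dma_unmap_page(struct intel_vgpu *vgpu, unsigned long gfn,
                               dma_addr_t dma_addr, unsigned long size)
{
        struct device *dev = &vgpu->gvt->gt->i915->drm.pdev->dev;

        dma_unmap_page(dev, dma_addr, size, DMA_BIDIRECTIONAL);
        gvt_unpin_guest_page(vgpu, gfn, size);
}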
253 static struct gvt_dma *__gvt_cache_find_dma_addr(struct intel_vgpu *vgpu, in __gvt_cache_find_dma_addr() argument
256 struct rb_node *node = kvmgt_vdev(vgpu)->dma_addr_cache.rb_node; in __gvt_cache_find_dma_addr()
272 static struct gvt_dma *__gvt_cache_find_gfn(struct intel_vgpu *vgpu, gfn_t gfn) in __gvt_cache_find_gfn() argument
274 struct rb_node *node = kvmgt_vdev(vgpu)->gfn_cache.rb_node; in __gvt_cache_find_gfn()
290 static int __gvt_cache_add(struct intel_vgpu *vgpu, gfn_t gfn, in __gvt_cache_add() argument
295 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in __gvt_cache_add()
301 new->vgpu = vgpu; in __gvt_cache_add()
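
The __gvt_cache_* helpers (lines 253-301) track each live mapping in a struct gvt_dma that is indexed twice, in a gfn-keyed and a dma_addr-keyed rb-tree, so both the map path and the unmap/notifier paths can find it cheaply. A sketch of the gfn-side lookup; the field list is inferred from the fragments here and may not match the source exactly:

struct gvt_dma {
        struct intel_vgpu *vgpu;        /* one of the 'member' entries above */
        struct rb_node gfn_node;        /* links into vdev->gfn_cache */
        struct rb_node dma_addr_node;   /* links into vdev->dma_addr_cache */
        gfn_t gfn;
        dma_addr_t dma_addr;
        unsigned long size;
        struct kref ref;                /* assumed: refcount for mapping reuse */
};

static struct gvt_dma *__gvt_cache_find_gfn(struct intel_vgpu *vgpu, gfn_t gfn)
{
        struct rb_node *node = kvmgt_vdev(vgpu)->gfn_cache.rb_node;

        while (node) {
                struct gvt_dma *itr = rb_entry(node, struct gvt_dma, gfn_node);

                if (gfn < itr->gfn)
                        node = node->rb_left;
                else if (gfn > itr->gfn)
                        node = node->rb_right;
                else
                        return itr;
        }
        return NULL;
}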
340 static void __gvt_cache_remove_entry(struct intel_vgpu *vgpu, in __gvt_cache_remove_entry() argument
343 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in __gvt_cache_remove_entry()
351 static void gvt_cache_destroy(struct intel_vgpu *vgpu) in gvt_cache_destroy() argument
355 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in gvt_cache_destroy()
365 gvt_dma_unmap_page(vgpu, dma->gfn, dma->dma_addr, dma->size); in gvt_cache_destroy()
366 __gvt_cache_remove_entry(vgpu, dma); in gvt_cache_destroy()
371 static void gvt_cache_init(struct intel_vgpu *vgpu) in gvt_cache_init() argument
373 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in gvt_cache_init()
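
gvt_cache_destroy() (lines 351-373) drains the tree by repeatedly taking rb_first() and unmapping/removing that entry until none remain, while gvt_cache_init() resets the two roots and the protecting mutex. Approximately (the cache_lock name is an assumption):

static void gvt_cache_destroy(struct intel_vgpu *vgpu)
{
        struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu);
        struct gvt_dma *dma;
        struct rb_node *node;

        for (;;) {
                mutex_lock(&vdev->cache_lock);
                node = rb_first(&vdev->gfn_cache);
                if (!node) {
                        mutex_unlock(&vdev->cache_lock);
                        break;
                }
                dma = rb_entry(node, struct gvt_dma, gfn_node);
                gvt_dma_unmap_page(vgpu, dma->gfn, dma->dma_addr, dma->size);
                __gvt_cache_remove_entry(vgpu, dma);
                mutex_unlock(&vdev->cache_lock);
        }
}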
449 static size_t intel_vgpu_reg_rw_opregion(struct intel_vgpu *vgpu, char *buf, in intel_vgpu_reg_rw_opregion() argument
452 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in intel_vgpu_reg_rw_opregion()
469 static void intel_vgpu_reg_release_opregion(struct intel_vgpu *vgpu, in intel_vgpu_reg_release_opregion() argument
479 static int handle_edid_regs(struct intel_vgpu *vgpu, in handle_edid_regs() argument
505 intel_gvt_ops->emulate_hotplug(vgpu, true); in handle_edid_regs()
507 intel_gvt_ops->emulate_hotplug(vgpu, false); in handle_edid_regs()
550 static size_t intel_vgpu_reg_rw_edid(struct intel_vgpu *vgpu, char *buf, in intel_vgpu_reg_rw_edid() argument
557 (struct vfio_edid_region *)kvmgt_vdev(vgpu)->region[i].data; in intel_vgpu_reg_rw_edid()
561 ret = handle_edid_regs(vgpu, region, buf, count, pos, iswrite); in intel_vgpu_reg_rw_edid()
573 static void intel_vgpu_reg_release_edid(struct intel_vgpu *vgpu, in intel_vgpu_reg_release_edid() argument
584 static int intel_vgpu_register_reg(struct intel_vgpu *vgpu, in intel_vgpu_register_reg() argument
589 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in intel_vgpu_register_reg()
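
intel_vgpu_register_reg() (lines 584-589) is what the opregion and EDID code above feeds its intel_vgpu_regops into: it appends a device-specific VFIO region descriptor to a growable per-vdev array. A sketch, assuming a krealloc()-grown array of small region records (field names beyond the visible rw/release ops are guesses):

struct vfio_region {                    /* layout assumed */
        u32 type;
        u32 subtype;
        size_t size;
        u32 flags;
        const struct intel_vgpu_regops *ops;
        void *data;
};

static int intel_vgpu_register_reg(struct intel_vgpu *vgpu,
                                   unsigned int type, unsigned int subtype,
                                   const struct intel_vgpu_regops *ops,
                                   size_t size, u32 flags, void *data)
{
        struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu);
        struct vfio_region *region;

        region = krealloc(vdev->region,
                          (vdev->num_regions + 1) * sizeof(*region),
                          GFP_KERNEL);
        if (!region)
                return -ENOMEM;

        vdev->region = region;
        region = &vdev->region[vdev->num_regions];
        region->type = type;
        region->subtype = subtype;
        region->ops = ops;      /* ->rw()/->release(), see lines 66-68 */
        region->size = size;
        region->flags = flags;
        region->data = data;    /* e.g. the vfio_edid_region at line 557 */
        vdev->num_regions++;
        return 0;
}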
611 struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu; in kvmgt_get_vfio_device() local
612 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in kvmgt_get_vfio_device()
626 struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu; in kvmgt_set_opregion() local
634 base = vgpu_opregion(vgpu)->va; in kvmgt_set_opregion()
643 ret = intel_vgpu_register_reg(vgpu, in kvmgt_set_opregion()
654 struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu; in kvmgt_set_edid() local
655 struct intel_vgpu_port *port = intel_vgpu_port(vgpu, port_num); in kvmgt_set_edid()
671 ret = intel_vgpu_register_reg(vgpu, in kvmgt_set_edid()
682 static void kvmgt_put_vfio_device(void *vgpu) in kvmgt_put_vfio_device() argument
684 struct kvmgt_vdev *vdev = kvmgt_vdev((struct intel_vgpu *)vgpu); in kvmgt_put_vfio_device()
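
kvmgt_get_vfio_device()/kvmgt_put_vfio_device() (lines 611-612 and 682-684) take and drop a reference on the backing VFIO device so it cannot vanish while guest memory is mapped through it. Sketch, assuming the vfio_device_get_from_dev()/vfio_device_put() pair of this kernel generation:

static int kvmgt_get_vfio_device(void *p_vgpu)
{
        struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu;
        struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu);

        vdev->vfio_device = vfio_device_get_from_dev(mdev_dev(vdev->mdev));
        if (!vdev->vfio_device)
                return -ENODEV;
        return 0;
}

static void kvmgt_put_vfio_device(void *vgpu)
{
        struct kvmgt_vdev *vdev = kvmgt_vdev((struct intel_vgpu *)vgpu);

        if (WARN_ON(!vdev->vfio_device))
                return;
        vfio_device_put(vdev->vfio_device);
}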
694 struct intel_vgpu *vgpu = NULL; in intel_vgpu_create() local
711 vgpu = intel_gvt_ops->vgpu_create(gvt, type); in intel_vgpu_create()
712 if (IS_ERR_OR_NULL(vgpu)) { in intel_vgpu_create()
713 ret = vgpu == NULL ? -EFAULT : PTR_ERR(vgpu); in intel_vgpu_create()
718 INIT_WORK(&kvmgt_vdev(vgpu)->release_work, intel_vgpu_release_work); in intel_vgpu_create()
720 kvmgt_vdev(vgpu)->mdev = mdev; in intel_vgpu_create()
721 mdev_set_drvdata(mdev, vgpu); in intel_vgpu_create()
733 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in intel_vgpu_remove() local
735 if (handle_valid(vgpu->handle)) in intel_vgpu_remove()
738 intel_gvt_ops->vgpu_destroy(vgpu); in intel_vgpu_remove()
748 struct intel_vgpu *vgpu = vdev->vgpu; in intel_vgpu_iommu_notifier() local
760 entry = __gvt_cache_find_gfn(vgpu, iov_pfn); in intel_vgpu_iommu_notifier()
764 gvt_dma_unmap_page(vgpu, entry->gfn, entry->dma_addr, in intel_vgpu_iommu_notifier()
766 __gvt_cache_remove_entry(vgpu, entry); in intel_vgpu_iommu_notifier()
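
The IOMMU notifier (lines 748-766) keeps the DMA cache coherent with userspace: when the VFIO container unmaps an iova range, every cached entry falling inside it is unmapped and evicted. A sketch, with the notifier embedding and the lock name assumed:

static int intel_vgpu_iommu_notifier(struct notifier_block *nb,
                                     unsigned long action, void *data)
{
        struct kvmgt_vdev *vdev =
                container_of(nb, struct kvmgt_vdev, iommu_notifier);
        struct intel_vgpu *vgpu = vdev->vgpu;

        if (action == VFIO_IOMMU_NOTIFY_DMA_UNMAP) {
                struct vfio_iommu_type1_dma_unmap *unmap = data;
                unsigned long iov_pfn = unmap->iova >> PAGE_SHIFT;
                unsigned long end_iov_pfn = iov_pfn + unmap->size / PAGE_SIZE;
                struct gvt_dma *entry;

                mutex_lock(&vdev->cache_lock);
                for (; iov_pfn < end_iov_pfn; iov_pfn++) {
                        entry = __gvt_cache_find_gfn(vgpu, iov_pfn);
                        if (!entry)
                                continue;

                        gvt_dma_unmap_page(vgpu, entry->gfn, entry->dma_addr,
                                           entry->size);
                        __gvt_cache_remove_entry(vgpu, entry);
                }
                mutex_unlock(&vdev->cache_lock);
        }

        return NOTIFY_OK;
}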
794 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in intel_vgpu_open() local
795 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in intel_vgpu_open()
841 intel_gvt_ops->vgpu_activate(vgpu); in intel_vgpu_open()
861 static void intel_vgpu_release_msi_eventfd_ctx(struct intel_vgpu *vgpu) in intel_vgpu_release_msi_eventfd_ctx() argument
863 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in intel_vgpu_release_msi_eventfd_ctx()
873 static void __intel_vgpu_release(struct intel_vgpu *vgpu) in __intel_vgpu_release() argument
875 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in __intel_vgpu_release()
876 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in __intel_vgpu_release()
880 if (!handle_valid(vgpu->handle)) in __intel_vgpu_release()
886 intel_gvt_ops->vgpu_release(vgpu); in __intel_vgpu_release()
901 info = (struct kvmgt_guest_info *)vgpu->handle; in __intel_vgpu_release()
904 intel_vgpu_release_msi_eventfd_ctx(vgpu); in __intel_vgpu_release()
908 vgpu->handle = 0; in __intel_vgpu_release()
913 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in intel_vgpu_release() local
915 __intel_vgpu_release(vgpu); in intel_vgpu_release()
923 __intel_vgpu_release(vdev->vgpu); in intel_vgpu_release_work()
926 static u64 intel_vgpu_get_bar_addr(struct intel_vgpu *vgpu, int bar) in intel_vgpu_get_bar_addr() argument
931 start_lo = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) & in intel_vgpu_get_bar_addr()
933 mem_type = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) & in intel_vgpu_get_bar_addr()
938 start_hi = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space in intel_vgpu_get_bar_addr()
953 static int intel_vgpu_bar_rw(struct intel_vgpu *vgpu, int bar, u64 off, in intel_vgpu_bar_rw() argument
956 u64 bar_start = intel_vgpu_get_bar_addr(vgpu, bar); in intel_vgpu_bar_rw()
960 ret = intel_gvt_ops->emulate_mmio_write(vgpu, in intel_vgpu_bar_rw()
963 ret = intel_gvt_ops->emulate_mmio_read(vgpu, in intel_vgpu_bar_rw()
968 static inline bool intel_vgpu_in_aperture(struct intel_vgpu *vgpu, u64 off) in intel_vgpu_in_aperture() argument
970 return off >= vgpu_aperture_offset(vgpu) && in intel_vgpu_in_aperture()
971 off < vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu); in intel_vgpu_in_aperture()
974 static int intel_vgpu_aperture_rw(struct intel_vgpu *vgpu, u64 off, in intel_vgpu_aperture_rw() argument
979 if (!intel_vgpu_in_aperture(vgpu, off) || in intel_vgpu_aperture_rw()
980 !intel_vgpu_in_aperture(vgpu, off + count)) { in intel_vgpu_aperture_rw()
985 aperture_va = io_mapping_map_wc(&vgpu->gvt->gt->ggtt->iomap, in intel_vgpu_aperture_rw()
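
intel_vgpu_get_bar_addr() (lines 926-951) decodes the guest-programmed BAR base out of the virtual config space, pulling in the high dword only for 64-bit memory BARs; intel_vgpu_in_aperture() then bounds-checks aperture accesses against the vGPU's slice, and intel_vgpu_aperture_rw() pokes the bytes through a WC mapping of the GGTT aperture (io_mapping_map_wc at line 985). The BAR decode, reconstructed from the fragments:

static u64 intel_vgpu_get_bar_addr(struct intel_vgpu *vgpu, int bar)
{
        u32 start_lo, start_hi, mem_type;

        start_lo = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
                        PCI_BASE_ADDRESS_MEM_MASK;
        mem_type = (*(u32 *)(vgpu->cfg_space.virtual_cfg_space + bar)) &
                        PCI_BASE_ADDRESS_MEM_TYPE_MASK;

        switch (mem_type) {
        case PCI_BASE_ADDRESS_MEM_TYPE_64:
                /* 64-bit BAR: the next config dword holds bits 63:32 */
                start_hi = *(u32 *)(vgpu->cfg_space.virtual_cfg_space +
                                    bar + 4);
                break;
        case PCI_BASE_ADDRESS_MEM_TYPE_32:
        case PCI_BASE_ADDRESS_MEM_TYPE_1M:
        default:
                start_hi = 0;
                break;
        }

        return ((u64)start_hi << 32) | start_lo;
}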
1004 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in intel_vgpu_rw() local
1005 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in intel_vgpu_rw()
1019 ret = intel_gvt_ops->emulate_cfg_write(vgpu, pos, in intel_vgpu_rw()
1022 ret = intel_gvt_ops->emulate_cfg_read(vgpu, pos, in intel_vgpu_rw()
1026 ret = intel_vgpu_bar_rw(vgpu, PCI_BASE_ADDRESS_0, pos, in intel_vgpu_rw()
1030 ret = intel_vgpu_aperture_rw(vgpu, pos, buf, count, is_write); in intel_vgpu_rw()
1044 return vdev->region[index].ops->rw(vgpu, buf, count, in intel_vgpu_rw()
1053 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in gtt_entry() local
1055 struct intel_gvt *gvt = vgpu->gvt; in gtt_entry()
1063 intel_vgpu_get_bar_gpa(vgpu, PCI_BASE_ADDRESS_0); in gtt_entry()
1222 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in intel_vgpu_mmap() local
1242 if (!intel_vgpu_in_aperture(vgpu, req_start)) in intel_vgpu_mmap()
1245 vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu)) in intel_vgpu_mmap()
1248 pgoff = (gvt_aperture_pa_base(vgpu->gvt) >> PAGE_SHIFT) + pgoff; in intel_vgpu_mmap()
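
intel_vgpu_mmap() (lines 1222-1248) only lets userspace map the vGPU's aperture slice: it checks the requested range against [vgpu_aperture_offset, vgpu_aperture_offset + vgpu_aperture_sz) and then remaps host aperture pfns straight into the vma. A hedged sketch; the region-index arithmetic assumes the usual VFIO_PCI_OFFSET_SHIFT encoding of the region in the file offset:

static int intel_vgpu_mmap(struct mdev_device *mdev,
                           struct vm_area_struct *vma)
{
        struct intel_vgpu *vgpu = mdev_get_drvdata(mdev);
        unsigned long req_size, pgoff, req_start;
        unsigned int index;

        index = vma->vm_pgoff >> (VFIO_PCI_OFFSET_SHIFT - PAGE_SHIFT);
        if (index >= VFIO_PCI_ROM_REGION_INDEX)
                return -EINVAL;

        pgoff = vma->vm_pgoff &
                ((1UL << (VFIO_PCI_OFFSET_SHIFT - PAGE_SHIFT)) - 1);
        req_size = vma->vm_end - vma->vm_start;
        req_start = pgoff << PAGE_SHIFT;

        if (!intel_vgpu_in_aperture(vgpu, req_start))
                return -EINVAL;
        if (req_start + req_size >
            vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu))
                return -EINVAL;

        /* Translate the aperture offset into a host physical pfn. */
        pgoff = (gvt_aperture_pa_base(vgpu->gvt) >> PAGE_SHIFT) + pgoff;

        return remap_pfn_range(vma, vma->vm_start, pgoff, req_size,
                               vma->vm_page_prot);
}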
1253 static int intel_vgpu_get_irq_count(struct intel_vgpu *vgpu, int type) in intel_vgpu_get_irq_count() argument
1261 static int intel_vgpu_set_intx_mask(struct intel_vgpu *vgpu, in intel_vgpu_set_intx_mask() argument
1269 static int intel_vgpu_set_intx_unmask(struct intel_vgpu *vgpu, in intel_vgpu_set_intx_unmask() argument
1276 static int intel_vgpu_set_intx_trigger(struct intel_vgpu *vgpu, in intel_vgpu_set_intx_trigger() argument
1283 static int intel_vgpu_set_msi_trigger(struct intel_vgpu *vgpu, in intel_vgpu_set_msi_trigger() argument
1297 kvmgt_vdev(vgpu)->msi_trigger = trigger; in intel_vgpu_set_msi_trigger()
1299 intel_vgpu_release_msi_eventfd_ctx(vgpu); in intel_vgpu_set_msi_trigger()
1304 static int intel_vgpu_set_irqs(struct intel_vgpu *vgpu, u32 flags, in intel_vgpu_set_irqs() argument
1308 int (*func)(struct intel_vgpu *vgpu, unsigned int index, in intel_vgpu_set_irqs()
1342 return func(vgpu, index, start, count, flags, data); in intel_vgpu_set_irqs()
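
intel_vgpu_set_irqs() (lines 1304-1342) is a standard VFIO_DEVICE_SET_IRQS dispatcher: the (index, action) pair from the header picks one of the intel_vgpu_set_intx_*/intel_vgpu_set_msi_trigger handlers listed above, which is then called through the function pointer declared at line 1308. Sketch:

static int intel_vgpu_set_irqs(struct intel_vgpu *vgpu, u32 flags,
                               unsigned int index, unsigned int start,
                               unsigned int count, void *data)
{
        int (*func)(struct intel_vgpu *vgpu, unsigned int index,
                    unsigned int start, unsigned int count, u32 flags,
                    void *data) = NULL;

        switch (index) {
        case VFIO_PCI_INTX_IRQ_INDEX:
                switch (flags & VFIO_IRQ_SET_ACTION_TYPE_MASK) {
                case VFIO_IRQ_SET_ACTION_MASK:
                        func = intel_vgpu_set_intx_mask;
                        break;
                case VFIO_IRQ_SET_ACTION_UNMASK:
                        func = intel_vgpu_set_intx_unmask;
                        break;
                case VFIO_IRQ_SET_ACTION_TRIGGER:
                        func = intel_vgpu_set_intx_trigger;
                        break;
                }
                break;
        case VFIO_PCI_MSI_IRQ_INDEX:
                if ((flags & VFIO_IRQ_SET_ACTION_TYPE_MASK) ==
                    VFIO_IRQ_SET_ACTION_TRIGGER)
                        func = intel_vgpu_set_msi_trigger;
                break;
        }

        if (!func)
                return -ENOTTY;

        return func(vgpu, index, start, count, flags, data);
}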
1348 struct intel_vgpu *vgpu = mdev_get_drvdata(mdev); in intel_vgpu_ioctl() local
1349 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in intel_vgpu_ioctl()
1352 gvt_dbg_core("vgpu%d ioctl, cmd: %d\n", vgpu->id, cmd); in intel_vgpu_ioctl()
1394 info.size = vgpu->gvt->device_info.cfg_space_size; in intel_vgpu_ioctl()
1400 info.size = vgpu->cfg_space.bar[info.index].size; in intel_vgpu_ioctl()
1420 info.size = gvt_aperture_sz(vgpu->gvt); in intel_vgpu_ioctl()
1432 PAGE_ALIGN(vgpu_aperture_offset(vgpu)); in intel_vgpu_ioctl()
1433 sparse->areas[0].size = vgpu_aperture_sz(vgpu); in intel_vgpu_ioctl()
1546 info.count = intel_vgpu_get_irq_count(vgpu, info.index); in intel_vgpu_ioctl()
1568 int max = intel_vgpu_get_irq_count(vgpu, hdr.index); in intel_vgpu_ioctl()
1584 ret = intel_vgpu_set_irqs(vgpu, hdr.flags, hdr.index, in intel_vgpu_ioctl()
1590 intel_gvt_ops->vgpu_reset(vgpu); in intel_vgpu_ioctl()
1603 ret = intel_gvt_ops->vgpu_query_plane(vgpu, &dmabuf); in intel_vgpu_ioctl()
1616 dmabuf_fd = intel_gvt_ops->vgpu_get_dmabuf(vgpu, dmabuf_id); in intel_vgpu_ioctl()
1631 struct intel_vgpu *vgpu = (struct intel_vgpu *) in vgpu_id_show() local
1633 return sprintf(buf, "%d\n", vgpu->id); in vgpu_id_show()
1762 intel_gvt_ops->write_protect_handler(info->vgpu, gpa, in kvmgt_page_track_write()
1787 static bool __kvmgt_vgpu_exist(struct intel_vgpu *vgpu, struct kvm *kvm) in __kvmgt_vgpu_exist() argument
1794 mutex_lock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
1795 for_each_active_vgpu(vgpu->gvt, itr, id) { in __kvmgt_vgpu_exist()
1806 mutex_unlock(&vgpu->gvt->lock); in __kvmgt_vgpu_exist()
1813 struct intel_vgpu *vgpu; in kvmgt_guest_init() local
1817 vgpu = mdev_get_drvdata(mdev); in kvmgt_guest_init()
1818 if (handle_valid(vgpu->handle)) in kvmgt_guest_init()
1821 vdev = kvmgt_vdev(vgpu); in kvmgt_guest_init()
1828 if (__kvmgt_vgpu_exist(vgpu, kvm)) in kvmgt_guest_init()
1835 vgpu->handle = (unsigned long)info; in kvmgt_guest_init()
1836 info->vgpu = vgpu; in kvmgt_guest_init()
1841 gvt_cache_init(vgpu); in kvmgt_guest_init()
1849 0444, vgpu->debugfs, in kvmgt_guest_init()
1861 gvt_cache_destroy(info->vgpu); in kvmgt_guest_exit()
1869 struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu; in kvmgt_attach_vgpu() local
1871 vgpu->vdev = kzalloc(sizeof(struct kvmgt_vdev), GFP_KERNEL); in kvmgt_attach_vgpu()
1873 if (!vgpu->vdev) in kvmgt_attach_vgpu()
1876 kvmgt_vdev(vgpu)->vgpu = vgpu; in kvmgt_attach_vgpu()
1884 struct intel_vgpu *vgpu = (struct intel_vgpu *)p_vgpu; in kvmgt_detach_vgpu() local
1885 struct kvmgt_vdev *vdev = kvmgt_vdev(vgpu); in kvmgt_detach_vgpu()
1892 vdev->region[i].ops->release(vgpu, in kvmgt_detach_vgpu()
1904 struct intel_vgpu *vgpu; in kvmgt_inject_msi() local
1911 vgpu = info->vgpu; in kvmgt_inject_msi()
1912 vdev = kvmgt_vdev(vgpu); in kvmgt_inject_msi()
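
kvmgt_inject_msi() (lines 1904-1912) delivers a guest MSI by signalling the eventfd that userspace registered through intel_vgpu_set_msi_trigger() (line 1297). A hedged sketch; the exact signature and the elided address/data validation are assumptions:

static int kvmgt_inject_msi(unsigned long handle, u32 addr, u16 data)
{
        struct kvmgt_guest_info *info;
        struct kvmgt_vdev *vdev;

        if (!handle_valid(handle))
                return -ESRCH;

        info = (struct kvmgt_guest_info *)handle;
        vdev = kvmgt_vdev(info->vgpu);

        /* (validation of addr/data against the guest's MSI programming
         * elided) */
        if (!vdev->msi_trigger)
                return -ENODEV;

        /* KVM/QEMU turns the eventfd kick into an MSI in the guest. */
        if (eventfd_signal(vdev->msi_trigger, 1) == 1)
                return 0;

        return -EFAULT;
}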
1952 struct intel_vgpu *vgpu; in kvmgt_dma_map_guest_page() local
1960 vgpu = ((struct kvmgt_guest_info *)handle)->vgpu; in kvmgt_dma_map_guest_page()
1961 vdev = kvmgt_vdev(vgpu); in kvmgt_dma_map_guest_page()
1965 entry = __gvt_cache_find_gfn(vgpu, gfn); in kvmgt_dma_map_guest_page()
1967 ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size); in kvmgt_dma_map_guest_page()
1971 ret = __gvt_cache_add(vgpu, gfn, *dma_addr, size); in kvmgt_dma_map_guest_page()
1976 gvt_dma_unmap_page(vgpu, gfn, entry->dma_addr, entry->size); in kvmgt_dma_map_guest_page()
1977 __gvt_cache_remove_entry(vgpu, entry); in kvmgt_dma_map_guest_page()
1979 ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size); in kvmgt_dma_map_guest_page()
1983 ret = __gvt_cache_add(vgpu, gfn, *dma_addr, size); in kvmgt_dma_map_guest_page()
1995 gvt_dma_unmap_page(vgpu, gfn, *dma_addr, size); in kvmgt_dma_map_guest_page()
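
kvmgt_dma_map_guest_page() (lines 1952-1995) stitches the cache and the mapping paths together: a miss maps and inserts; a hit with a different size unmaps, remaps and reinserts (the sequence at lines 1976-1983); a clean hit just reuses the cached mapping (refcounting assumed). Roughly, with error unwinding (the unmap at line 1995) abbreviated:

static int kvmgt_dma_map_guest_page(unsigned long handle, unsigned long gfn,
                                    unsigned long size, dma_addr_t *dma_addr)
{
        struct intel_vgpu *vgpu;
        struct kvmgt_vdev *vdev;
        struct gvt_dma *entry;
        int ret = 0;

        if (!handle_valid(handle))
                return -EINVAL;

        vgpu = ((struct kvmgt_guest_info *)handle)->vgpu;
        vdev = kvmgt_vdev(vgpu);

        mutex_lock(&vdev->cache_lock);

        entry = __gvt_cache_find_gfn(vgpu, gfn);
        if (!entry) {
                /* miss: create the mapping and cache it */
                ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size);
                if (!ret)
                        ret = __gvt_cache_add(vgpu, gfn, *dma_addr, size);
        } else if (entry->size != size) {
                /* same gfn cached with a different size: remap */
                gvt_dma_unmap_page(vgpu, gfn, entry->dma_addr, entry->size);
                __gvt_cache_remove_entry(vgpu, entry);
                ret = gvt_dma_map_page(vgpu, gfn, dma_addr, size);
                if (!ret)
                        ret = __gvt_cache_add(vgpu, gfn, *dma_addr, size);
        } else {
                kref_get(&entry->ref);  /* clean hit: reuse the mapping */
                *dma_addr = entry->dma_addr;
        }

        mutex_unlock(&vdev->cache_lock);
        return ret;
}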
2012 vdev = kvmgt_vdev(info->vgpu); in kvmgt_dma_pin_guest_page()
2015 entry = __gvt_cache_find_dma_addr(info->vgpu, dma_addr); in kvmgt_dma_pin_guest_page()
2029 gvt_dma_unmap_page(entry->vgpu, entry->gfn, entry->dma_addr, in __gvt_dma_release()
2031 __gvt_cache_remove_entry(entry->vgpu, entry); in __gvt_dma_release()
2036 struct intel_vgpu *vgpu; in kvmgt_dma_unmap_guest_page() local
2043 vgpu = ((struct kvmgt_guest_info *)handle)->vgpu; in kvmgt_dma_unmap_guest_page()
2044 vdev = kvmgt_vdev(vgpu); in kvmgt_dma_unmap_guest_page()
2047 entry = __gvt_cache_find_dma_addr(vgpu, dma_addr); in kvmgt_dma_unmap_guest_page()
2063 return vfio_dma_rw(kvmgt_vdev(info->vgpu)->vfio_group, in kvmgt_rw_gpa()
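
Finally, kvmgt_rw_gpa() (line 2063) delegates guest-physical reads and writes to vfio_dma_rw(), which resolves the gpa through the VFIO group's IOMMU mappings. The whole function is plausibly little more than:

static int kvmgt_rw_gpa(unsigned long handle, unsigned long gpa,
                        void *buf, unsigned long len, bool write)
{
        struct kvmgt_guest_info *info;

        if (!handle_valid(handle))
                return -ESRCH;

        info = (struct kvmgt_guest_info *)handle;

        return vfio_dma_rw(kvmgt_vdev(info->vgpu)->vfio_group,
                           gpa, buf, len, write);
}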