Lines matching refs: gvt

Each entry below gives the source line number, the matching line, and the enclosing function; "argument" marks lines where gvt is declared as a function parameter, "local" where it is declared as a local variable.

50 unsigned long intel_gvt_get_device_type(struct intel_gvt *gvt)  in intel_gvt_get_device_type()  argument
52 struct drm_i915_private *i915 = gvt->gt->i915; in intel_gvt_get_device_type()
68 bool intel_gvt_match_device(struct intel_gvt *gvt, in intel_gvt_match_device() argument
71 return intel_gvt_get_device_type(gvt) & device; in intel_gvt_match_device()
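
intel_gvt_get_device_type() folds the running i915 device into a single
device-type bit, and intel_gvt_match_device() tests a handler entry's device
mask against that bit. Below is a minimal user-space sketch of the same
bitmask gating; the D_* values and the integer platform parameter are
hypothetical stand-ins for the driver's device flags and IS_*() checks.

    #include <stdbool.h>

    /* Hypothetical stand-ins for the driver's D_* device-type flags. */
    #define D_BDW (1UL << 0)
    #define D_SKL (1UL << 1)
    #define D_BXT (1UL << 2)
    #define D_ALL (D_BDW | D_SKL | D_BXT)   /* "applies everywhere" mask */

    /* Models intel_gvt_get_device_type(): one bit for the running device. */
    static unsigned long get_device_type(int platform)
    {
            switch (platform) {
            case 0:  return D_BDW;
            case 1:  return D_SKL;
            default: return D_BXT;
            }
    }

    /* Models intel_gvt_match_device(): an entry applies only when its
     * device mask contains the running device's bit. */
    static bool match_device(int platform, unsigned long device_mask)
    {
            return (get_device_type(platform) & device_mask) != 0;
    }
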
86 static struct intel_gvt_mmio_info *find_mmio_info(struct intel_gvt *gvt, in find_mmio_info() argument
91 hash_for_each_possible(gvt->mmio.mmio_info_table, e, node, offset) { in find_mmio_info()
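
find_mmio_info() (line 86) resolves a register offset to its tracked entry by
scanning only the hash bucket the offset maps to and re-checking the key,
since distinct offsets can share a bucket. The same lookup with the kernel's
hashtable.h macros replaced by an explicit chained table; the bucket count is
an arbitrary assumption.

    #include <stddef.h>

    #define MMIO_HASH_BUCKETS 256   /* assumed, not the driver's size */

    struct mmio_info {
            unsigned int offset;        /* register offset, the hash key */
            struct mmio_info *next;     /* bucket chain */
    };

    static struct mmio_info *mmio_info_table[MMIO_HASH_BUCKETS];

    static struct mmio_info *find_mmio_info(unsigned int offset)
    {
            struct mmio_info *e;

            /* Like hash_for_each_possible(): walk one bucket, compare keys. */
            for (e = mmio_info_table[offset % MMIO_HASH_BUCKETS]; e; e = e->next)
                    if (e->offset == offset)
                            return e;
            return NULL;
    }
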
98 static int new_mmio_info(struct intel_gvt *gvt, in new_mmio_info() argument
106 if (!intel_gvt_match_device(gvt, device)) in new_mmio_info()
121 p = find_mmio_info(gvt, info->offset); in new_mmio_info()
138 gvt->mmio.mmio_attribute[info->offset / 4] = flags; in new_mmio_info()
140 hash_add(gvt->mmio.mmio_info_table, &info->node, info->offset); in new_mmio_info()
141 gvt->mmio.num_tracked_mmio++; in new_mmio_info()
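
new_mmio_info() (line 98) ties these helpers together: entries whose device
mask does not match are skipped (line 106), duplicates found through
find_mmio_info() are rejected (line 121), the attribute flags land in a
per-dword array indexed by offset / 4 (line 138), and the entry is hashed in
and counted (lines 140-141). A sketch of that path, reusing the types from
the two sketches above; the error handling and array size are simplified
assumptions.

    #include <errno.h>
    #include <stdlib.h>

    static unsigned char mmio_attribute[1 << 20];   /* one byte per dword */
    static unsigned int num_tracked_mmio;

    static int new_mmio_info(int platform, unsigned int offset,
                             unsigned long device_mask, unsigned char flags)
    {
            struct mmio_info *info;

            if (!match_device(platform, device_mask))
                    return 0;             /* entry not for this device */

            if (find_mmio_info(offset))
                    return -EEXIST;       /* duplicate definition */

            info = calloc(1, sizeof(*info));
            if (!info)
                    return -ENOMEM;

            info->offset = offset;
            mmio_attribute[offset / 4] = flags;   /* per-dword attributes */

            /* hash_add(): push onto the bucket chain. */
            info->next = mmio_info_table[offset % MMIO_HASH_BUCKETS];
            mmio_info_table[offset % MMIO_HASH_BUCKETS] = info;
            num_tracked_mmio++;
            return 0;
    }
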
155 intel_gvt_render_mmio_to_engine(struct intel_gvt *gvt, unsigned int offset) in intel_gvt_render_mmio_to_engine() argument
161 for_each_engine(engine, gvt->gt, id) in intel_gvt_render_mmio_to_engine()
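
intel_gvt_render_mmio_to_engine() (line 155) walks every engine on the GT and
returns the one whose per-engine register window contains the offset; the
driver masks off the offset's low 12 bits and compares the result against
engine->mmio_base. A standalone sketch with a hypothetical engine table (the
bases follow the familiar gen8 layout but are illustrative only).

    #include <stddef.h>

    struct engine {
            const char *name;
            unsigned int mmio_base;   /* start of the engine's 4K window */
    };

    static struct engine engines[] = {
            { "rcs0", 0x2000 },
            { "vcs0", 0x12000 },
            { "bcs0", 0x22000 },
    };

    static struct engine *render_mmio_to_engine(unsigned int offset)
    {
            size_t i;

            /* Compare the offset's 4K-aligned base against each engine's
             * mmio_base, as the driver does with ~GENMASK(11, 0). */
            for (i = 0; i < sizeof(engines) / sizeof(engines[0]); i++)
                    if ((offset & ~0xfffu) == engines[i].mmio_base)
                            return &engines[i];
            return NULL;
    }
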
222 if (INTEL_GEN(vgpu->gvt->gt->i915) <= 10) { in gamw_echo_dev_rw_ia_write()
258 struct intel_gvt *gvt = vgpu->gvt; in fence_mmio_write() local
267 mmio_hw_access_pre(gvt->gt); in fence_mmio_write()
270 mmio_hw_access_post(gvt->gt); in fence_mmio_write()
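
fence_mmio_write() (line 258) shows the bracketing used whenever emulation
must touch real hardware: the access sits between mmio_hw_access_pre() and
mmio_hw_access_post(), which keep the GT awake for the duration (runtime PM
in the driver). The shape of that pattern, with a toy refcount standing in
for the wakeref:

    static int gt_wakeref;   /* toy refcount in place of runtime PM */

    static void mmio_hw_access_pre(void)  { gt_wakeref++; }
    static void mmio_hw_access_post(void) { gt_wakeref--; }

    static void write_fence_reg(volatile unsigned int *reg, unsigned int val)
    {
            mmio_hw_access_pre();    /* hardware must be awake for the poke */
            *reg = val;              /* the actual MMIO write */
            mmio_hw_access_post();   /* drop the wakeref again */
    }
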
288 if (INTEL_GEN(vgpu->gvt->gt->i915) >= 9) { in mul_force_wake_write()
350 engine_mask &= vgpu->gvt->gt->info.engine_mask; in gdrst_mmio_write()
460 intel_gvt_check_vblank_emulation(vgpu->gvt); in pipeconf_mmio_write()
524 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in force_nonpriv_write()
760 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pri_surf_mmio_write()
801 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in reg50080_mmio_write()
825 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in trigger_aux_channel_interrupt()
928 if ((INTEL_GEN(vgpu->gvt->gt->i915) >= 9) in dp_aux_ch_ctl_mmio_write()
932 } else if (IS_BROADWELL(vgpu->gvt->gt->i915) && in dp_aux_ch_ctl_mmio_write()
1248 struct kobject *kobj = &vgpu->gvt->gt->i915->drm.primary->kdev->kobj; in send_display_ready_uevent()
1309 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in pf_write()
1365 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in dma_ctrl_write()
1384 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gen9_trtte_write()
1436 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1437 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1438 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1439 IS_COMETLAKE(vgpu->gvt->gt->i915)) { in mailbox_write()
1449 } else if (IS_BROXTON(vgpu->gvt->gt->i915)) { in mailbox_write()
1462 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1463 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1464 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1465 IS_COMETLAKE(vgpu->gvt->gt->i915)) in mailbox_write()
1490 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in hws_pga_write()
1521 if (IS_BROXTON(vgpu->gvt->gt->i915)) in skl_power_well_ctl_write()
1695 struct intel_gvt *gvt = vgpu->gvt; in mmio_read_from_hw() local
1697 intel_gvt_render_mmio_to_engine(gvt, offset); in mmio_read_from_hw()
1707 vgpu == gvt->scheduler.engine_owner[engine->id] || in mmio_read_from_hw()
1710 mmio_hw_access_pre(gvt->gt); in mmio_read_from_hw()
1712 intel_uncore_read(gvt->gt->uncore, _MMIO(offset)); in mmio_read_from_hw()
1713 mmio_hw_access_post(gvt->gt); in mmio_read_from_hw()
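
mmio_read_from_hw() (line 1695) is the pass-through read path: the offset is
mapped back to an engine, and the vGPU's shadow register (vreg) is refreshed
from real hardware only when the register is not engine-specific or this vGPU
currently owns that engine on the scheduler (line 1707); the guest always
reads the vreg value. A condensed sketch; vgpu_owns_engine() and
uncore_read() are hypothetical stand-ins with toy bodies, and the pre/post
helpers follow the bracketing sketch above.

    #include <stdbool.h>

    static void mmio_hw_access_pre(void)  { /* wake hardware */ }
    static void mmio_hw_access_post(void) { /* release wakeref */ }
    static bool vgpu_owns_engine(int vgpu_id, int engine_id)
    { return engine_id == 0; }                       /* toy stand-in */
    static unsigned int uncore_read(unsigned int offset)
    { return offset; }                               /* toy stand-in */

    static unsigned int mmio_read_from_hw(int vgpu_id, int engine_id,
                                          unsigned int *vreg,
                                          unsigned int offset)
    {
            /* engine_id < 0: not an engine register, always refresh */
            if (engine_id < 0 || vgpu_owns_engine(vgpu_id, engine_id)) {
                    mmio_hw_access_pre();
                    vreg[offset / 4] = uncore_read(offset);  /* refresh shadow */
                    mmio_hw_access_post();
            }
            return vreg[offset / 4];
    }
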
1722 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in elsp_mmio_write()
1723 const struct intel_engine_cs *engine = intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in elsp_mmio_write()
1766 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in ring_mode_mmio_write()
1771 if (IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
1772 IS_COMETLAKE(vgpu->gvt->gt->i915)) in ring_mode_mmio_write()
1781 if ((IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
1782 IS_COMETLAKE(vgpu->gvt->gt->i915)) && in ring_mode_mmio_write()
1886 ret = new_mmio_info(gvt, i915_mmio_reg_offset(reg), \
1915 if (HAS_ENGINE(gvt->gt, VCS1)) \
1934 static int init_generic_mmio_info(struct intel_gvt *gvt) in init_generic_mmio_info() argument
1936 struct drm_i915_private *dev_priv = gvt->gt->i915; in init_generic_mmio_info()
2752 static int init_bdw_mmio_info(struct intel_gvt *gvt) in init_bdw_mmio_info() argument
2754 struct drm_i915_private *dev_priv = gvt->gt->i915; in init_bdw_mmio_info()
2941 static int init_skl_mmio_info(struct intel_gvt *gvt) in init_skl_mmio_info() argument
2943 struct drm_i915_private *dev_priv = gvt->gt->i915; in init_skl_mmio_info()
3191 static int init_bxt_mmio_info(struct intel_gvt *gvt) in init_bxt_mmio_info() argument
3193 struct drm_i915_private *dev_priv = gvt->gt->i915; in init_bxt_mmio_info()
3378 static struct gvt_mmio_block *find_mmio_block(struct intel_gvt *gvt, in find_mmio_block() argument
3381 unsigned long device = intel_gvt_get_device_type(gvt); in find_mmio_block()
3382 struct gvt_mmio_block *block = gvt->mmio.mmio_block; in find_mmio_block()
3383 int num = gvt->mmio.num_mmio_block; in find_mmio_block()
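
Alongside the hash table, whole ranges are tracked as blocks; find_mmio_block()
(line 3378) does a linear scan over gvt->mmio.mmio_block, skipping blocks whose
device mask does not include the current device type and matching on the
offset range. A sketch over a simplified block type:

    #include <stddef.h>

    struct mmio_block {
            unsigned int offset;     /* start of the block */
            unsigned int size;       /* length in bytes */
            unsigned long device;    /* device-type mask it applies to */
    };

    static struct mmio_block *find_mmio_block(struct mmio_block *block,
                                              int num,
                                              unsigned long device_type,
                                              unsigned int offset)
    {
            int i;

            for (i = 0; i < num; i++, block++) {
                    if (!(device_type & block->device))
                            continue;   /* block not valid on this device */
                    if (offset >= block->offset &&
                        offset < block->offset + block->size)
                            return block;
            }
            return NULL;
    }
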
3404 void intel_gvt_clean_mmio_info(struct intel_gvt *gvt) in intel_gvt_clean_mmio_info() argument
3410 hash_for_each_safe(gvt->mmio.mmio_info_table, i, tmp, e, node) in intel_gvt_clean_mmio_info()
3413 vfree(gvt->mmio.mmio_attribute); in intel_gvt_clean_mmio_info()
3414 gvt->mmio.mmio_attribute = NULL; in intel_gvt_clean_mmio_info()
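
intel_gvt_clean_mmio_info() (line 3404) unwinds setup: a removal-safe hash
walk (hash_for_each_safe, line 3410) frees every tracked entry, after which
the attribute array is vfree'd and NULLed (lines 3413-3414) so the teardown
is safe to reach from error paths. The same teardown against the
chained-table model sketched earlier:

    #include <stdlib.h>

    static void clean_mmio_info(void)
    {
            unsigned int i;

            /* Removal-safe: cache ->next before freeing, as
             * hash_for_each_safe() does with its tmp cursor. */
            for (i = 0; i < MMIO_HASH_BUCKETS; i++) {
                    struct mmio_info *e = mmio_info_table[i], *tmp;

                    while (e) {
                            tmp = e->next;
                            free(e);
                            e = tmp;
                    }
                    mmio_info_table[i] = NULL;
            }
            /* The driver also vfree()s mmio_attribute here and NULLs it. */
    }
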
3441 int intel_gvt_setup_mmio_info(struct intel_gvt *gvt) in intel_gvt_setup_mmio_info() argument
3443 struct intel_gvt_device_info *info = &gvt->device_info; in intel_gvt_setup_mmio_info()
3444 struct drm_i915_private *i915 = gvt->gt->i915; in intel_gvt_setup_mmio_info()
3445 int size = info->mmio_size / 4 * sizeof(*gvt->mmio.mmio_attribute); in intel_gvt_setup_mmio_info()
3448 gvt->mmio.mmio_attribute = vzalloc(size); in intel_gvt_setup_mmio_info()
3449 if (!gvt->mmio.mmio_attribute) in intel_gvt_setup_mmio_info()
3452 ret = init_generic_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3457 ret = init_bdw_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3464 ret = init_bdw_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3467 ret = init_skl_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3471 ret = init_bdw_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3474 ret = init_skl_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3477 ret = init_bxt_mmio_info(gvt); in intel_gvt_setup_mmio_info()
3482 gvt->mmio.mmio_block = mmio_blocks; in intel_gvt_setup_mmio_info()
3483 gvt->mmio.num_mmio_block = ARRAY_SIZE(mmio_blocks); in intel_gvt_setup_mmio_info()
3487 intel_gvt_clean_mmio_info(gvt); in intel_gvt_setup_mmio_info()
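
intel_gvt_setup_mmio_info() (line 3441) first allocates one attribute byte
per register dword (mmio_size / 4, line 3445), then layers the platform
tables: the generic entries, init_bdw_mmio_info() on every supported
platform, init_skl_mmio_info() on the gen9 parts as well, and
init_bxt_mmio_info() on top of those for Broxton; any failure unwinds through
intel_gvt_clean_mmio_info(). A sketch of that cascade; the platform enum and
its ordering are assumptions standing in for the driver's IS_*() branches,
and the init stubs are placeholders for the real register tables.

    #include <stdlib.h>

    enum platform { PLAT_BDW, PLAT_SKL, PLAT_BXT };   /* hypothetical */

    static int init_generic(void) { return 0; }   /* placeholders for the */
    static int init_bdw(void)     { return 0; }   /* init_*_mmio_info()   */
    static int init_skl(void)     { return 0; }   /* register tables      */
    static int init_bxt(void)     { return 0; }
    static void clean_mmio_info(void) { }

    static unsigned char *mmio_attrs;   /* gvt->mmio.mmio_attribute */

    static int setup_mmio_info(enum platform p, unsigned int mmio_size)
    {
            int ret;

            /* One attribute byte per 4-byte register. */
            mmio_attrs = calloc(mmio_size / 4, 1);
            if (!mmio_attrs)
                    return -1;

            /* Later tables build on earlier ones, so order matters. */
            ret = init_generic();
            if (!ret)
                    ret = init_bdw();        /* every platform gets these */
            if (!ret && p >= PLAT_SKL)
                    ret = init_skl();
            if (!ret && p == PLAT_BXT)
                    ret = init_bxt();

            if (ret) {
                    clean_mmio_info();
                    free(mmio_attrs);
                    mmio_attrs = NULL;
            }
            return ret;
    }
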
3500 int intel_gvt_for_each_tracked_mmio(struct intel_gvt *gvt, in intel_gvt_for_each_tracked_mmio() argument
3501 int (*handler)(struct intel_gvt *gvt, u32 offset, void *data), in intel_gvt_for_each_tracked_mmio() argument
3504 struct gvt_mmio_block *block = gvt->mmio.mmio_block; in intel_gvt_for_each_tracked_mmio()
3508 hash_for_each(gvt->mmio.mmio_info_table, i, e, node) { in intel_gvt_for_each_tracked_mmio()
3509 ret = handler(gvt, e->offset, data); in intel_gvt_for_each_tracked_mmio()
3514 for (i = 0; i < gvt->mmio.num_mmio_block; i++, block++) { in intel_gvt_for_each_tracked_mmio()
3520 ret = handler(gvt, in intel_gvt_for_each_tracked_mmio()
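
intel_gvt_for_each_tracked_mmio() (line 3500) invokes the callback once per
tracked dword: first for every individually hashed register (line 3508), then
for every 4-byte slot inside every block (line 3514). Sketch over the
simplified table and block types from the earlier sketches:

    typedef int (*mmio_handler_t)(unsigned int offset, void *data);

    static int for_each_tracked_mmio(struct mmio_block *blocks, int num_blocks,
                                     mmio_handler_t handler, void *data)
    {
            struct mmio_info *e;
            unsigned int bkt, off;
            int i, ret;

            /* 1) individually tracked registers, one per hash entry */
            for (bkt = 0; bkt < MMIO_HASH_BUCKETS; bkt++)
                    for (e = mmio_info_table[bkt]; e; e = e->next) {
                            ret = handler(e->offset, data);
                            if (ret)
                                    return ret;
                    }

            /* 2) block-tracked ranges, stepped one dword at a time */
            for (i = 0; i < num_blocks; i++)
                    for (off = 0; off < blocks[i].size; off += 4) {
                            ret = handler(blocks[i].offset + off, data);
                            if (ret)
                                    return ret;
                    }
            return 0;
    }
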
3599 bool intel_gvt_in_force_nonpriv_whitelist(struct intel_gvt *gvt, in intel_gvt_in_force_nonpriv_whitelist() argument
3619 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_mmio_reg_rw()
3620 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_mmio_reg_rw() local
3632 mmio_block = find_mmio_block(gvt, offset); in intel_vgpu_mmio_reg_rw()
3643 mmio_info = find_mmio_info(gvt, offset); in intel_vgpu_mmio_reg_rw()
3656 if (intel_gvt_mmio_has_mode_mask(gvt, mmio_info->offset)) { in intel_vgpu_mmio_reg_rw()
3674 if (intel_gvt_mmio_has_mode_mask(gvt, mmio_info->offset)) { in intel_vgpu_mmio_reg_rw()
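
intel_vgpu_mmio_reg_rw() (line 3619) is the dispatch point for every trapped
access: block ranges are tried first (line 3632), then the per-register entry
(line 3643). The intel_gvt_mmio_has_mode_mask() checks (lines 3656 and 3674)
cover i915's masked registers, where bits 31:16 of a written value are a
write-enable mask for bits 15:0. A self-contained sketch of that merge,
assuming the usual (old & ~mask) | (new & mask) update:

    #include <assert.h>
    #include <stdint.h>

    /* Masked-register write: only the low 16 bits whose corresponding
     * mask bit (31:16) is set actually change; everything else keeps
     * its old value. */
    static uint32_t masked_write(uint32_t old_vreg, uint32_t written)
    {
            uint32_t mask = written >> 16;   /* write-enable bits */

            return (old_vreg & ~mask) | (written & mask);
    }

    int main(void)
    {
            /* 0x00010001 sets bit 0, 0x00010000 clears it; the other
             * bits of the old value survive both writes. */
            assert(masked_write(0x0000f00e, 0x00010001) == 0x0000f00f);
            assert(masked_write(0x0000f00f, 0x00010000) == 0x0000f00e);
            return 0;
    }
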