Lines Matching refs:gvt

83 struct drm_i915_private *dev_priv = workload->vgpu->gvt->gt->i915; in sr_oa_regs()
126 struct intel_gvt *gvt = vgpu->gvt; in populate_shadow_context() local
205 if (IS_BROADWELL(gvt->gt->i915) && workload->engine->id == RCS0) in populate_shadow_context()
272 struct intel_gvt *gvt = container_of(nb, struct intel_gvt, in shadow_context_status_change() local
274 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in shadow_context_status_change()
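
The hits at 272-274 are the notifier side of the scheduler: shadow_context_status_change() receives only a struct notifier_block pointer and recovers the owning struct intel_gvt with container_of(). (Judging by the function names, this listing comes from the i915 GVT-g workload scheduler, most likely drivers/gpu/drm/i915/gvt/scheduler.c.) Below is a minimal user-space sketch of that recovery, compilable on its own; the struct layouts are simplified stand-ins rather than the kernel definitions, and container_of() is written out as the usual offsetof() idiom the kernel macro reduces to.

#include <stddef.h>
#include <stdio.h>

/* The offsetof() idiom the kernel's container_of() macro reduces to. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct notifier_block {
	int (*notifier_call)(struct notifier_block *nb,
			     unsigned long action, void *data);
};

/* Simplified stand-in: in the driver the notifier blocks are an array,
 * gvt->shadow_ctx_notifier_block[engine_id]; one block is enough here. */
struct intel_gvt {
	const char *name;
	struct notifier_block shadow_ctx_notifier_block;
};

static int shadow_context_status_change(struct notifier_block *nb,
					unsigned long action, void *data)
{
	/* Recover the enclosing gvt instance from the embedded notifier
	 * block, as the real callback does at line 272 of the listing. */
	struct intel_gvt *gvt = container_of(nb, struct intel_gvt,
					     shadow_ctx_notifier_block);

	(void)data;
	printf("context status change on %s, action %lu\n", gvt->name, action);
	return 0;
}

int main(void)
{
	struct intel_gvt gvt = {
		.name = "gvt0",
		.shadow_ctx_notifier_block.notifier_call =
					shadow_context_status_change,
	};

	/* A notifier chain would normally make this call; do it directly. */
	gvt.shadow_ctx_notifier_block.notifier_call(
			&gvt.shadow_ctx_notifier_block, 1, NULL);
	return 0;
}
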
504 struct intel_gvt *gvt = workload->vgpu->gvt; in prepare_shadow_batch_buffer() local
505 const int gmadr_bytes = gvt->device_info.gmadr_bytes_in_cmd; in prepare_shadow_batch_buffer()
799 pick_next_workload(struct intel_gvt *gvt, struct intel_engine_cs *engine) in pick_next_workload() argument
801 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in pick_next_workload()
804 mutex_lock(&gvt->sched_lock); in pick_next_workload()
852 mutex_unlock(&gvt->sched_lock); in pick_next_workload()
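
The pick_next_workload() hits at 799-852 trace gvt->sched_lock: the choice of which queued workload becomes an engine's current workload is made entirely under that single gvt-wide lock. Here is a compilable sketch of the same locking shape, with a pthread mutex standing in for the kernel mutex; struct gvt, struct workload, queue[] and current[] are simplified stand-ins, not the driver's types.

#include <pthread.h>
#include <stdio.h>

#define NUM_ENGINES 4	/* invented for the sketch */

struct workload {
	int id;
	struct workload *next;			/* per-engine FIFO link */
};

struct scheduler {
	struct workload *queue[NUM_ENGINES];	/* pending work, per engine */
	struct workload *current[NUM_ENGINES];	/* in flight, per engine */
};

struct gvt {
	pthread_mutex_t sched_lock;		/* gvt->sched_lock stand-in */
	struct scheduler scheduler;
};

/* Same shape as pick_next_workload() in the listing: take sched_lock (804),
 * hand out work only if the engine is idle and something is queued, and
 * release the lock before returning (852). */
static struct workload *pick_next_workload(struct gvt *gvt, int engine_id)
{
	struct scheduler *s = &gvt->scheduler;
	struct workload *w = NULL;

	pthread_mutex_lock(&gvt->sched_lock);
	if (!s->current[engine_id] && s->queue[engine_id]) {
		w = s->queue[engine_id];
		s->queue[engine_id] = w->next;
		s->current[engine_id] = w;
	}
	pthread_mutex_unlock(&gvt->sched_lock);
	return w;
}

int main(void)
{
	struct gvt gvt = { .sched_lock = PTHREAD_MUTEX_INITIALIZER };
	struct workload w = { .id = 1 };

	gvt.scheduler.queue[0] = &w;
	struct workload *picked = pick_next_workload(&gvt, 0);
	printf("picked workload %d\n", picked ? picked->id : -1);
	return 0;
}

Keeping one lock across all engines also serializes picking against completion, which the complete_current_workload() hits below (1029/1098) rely on by taking the same sched_lock.
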
1002 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in intel_vgpu_clean_workloads()
1018 static void complete_current_workload(struct intel_gvt *gvt, int ring_id) in complete_current_workload() argument
1020 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in complete_current_workload()
1029 mutex_lock(&gvt->sched_lock); in complete_current_workload()
1095 if (gvt->scheduler.need_reschedule) in complete_current_workload()
1096 intel_gvt_request_service(gvt, INTEL_GVT_REQUEST_EVENT_SCHED); in complete_current_workload()
1098 mutex_unlock(&gvt->sched_lock); in complete_current_workload()
1106 struct intel_gvt *gvt = engine->i915->gvt; in workload_thread() local
1107 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in workload_thread()
1120 workload = pick_next_workload(gvt, engine); in workload_thread()
1167 complete_current_workload(gvt, engine->id); in workload_thread()
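
The workload_thread() hits at 1106-1167 tie the two halves together: a per-engine thread picks the next workload, dispatches it, and then calls complete_current_workload(), which again takes gvt->sched_lock (1029/1098) and, per 1095-1096, requests a scheduling event when need_reschedule is set. The sketch below models one such consumer with a pthread condition variable playing the role of the scheduler wait queue; every name is a simplified stand-in, and the pick step is the same move as in the previous sketch, inlined.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct workload { int id; struct workload *next; };

/* One engine is enough to show the loop; everything here is a stand-in. */
struct gvt {
	pthread_mutex_t sched_lock;
	pthread_cond_t waitq;		/* scheduler.waitq[engine] equivalent */
	struct workload *queue;		/* pending */
	struct workload *current;	/* in flight */
	bool stop;
};

/* The consumer: shaped like workload_thread() -> pick_next_workload() ->
 * dispatch -> complete_current_workload(). Scheduler state is only touched
 * with sched_lock held; the dispatch itself happens outside the lock. */
static void *workload_thread(void *arg)
{
	struct gvt *gvt = arg;

	pthread_mutex_lock(&gvt->sched_lock);
	for (;;) {
		/* Sleep on the wait queue until work arrives or stop is set. */
		while (!gvt->queue && !gvt->stop)
			pthread_cond_wait(&gvt->waitq, &gvt->sched_lock);
		if (!gvt->queue)	/* stop requested and queue drained */
			break;

		/* pick_next_workload(): queue head becomes current */
		struct workload *w = gvt->queue;
		gvt->queue = w->next;
		gvt->current = w;

		pthread_mutex_unlock(&gvt->sched_lock);
		printf("running workload %d\n", w->id);	/* dispatch stand-in */
		pthread_mutex_lock(&gvt->sched_lock);

		/* complete_current_workload(): retire under sched_lock; the
		 * real code also requests INTEL_GVT_REQUEST_EVENT_SCHED here
		 * when need_reschedule is set (1095-1096). */
		gvt->current = NULL;
	}
	pthread_mutex_unlock(&gvt->sched_lock);
	return NULL;
}

int main(void)
{
	struct gvt gvt = {
		.sched_lock = PTHREAD_MUTEX_INITIALIZER,
		.waitq = PTHREAD_COND_INITIALIZER,
	};
	struct workload w2 = { .id = 2 };
	struct workload w1 = { .id = 1, .next = &w2 };
	pthread_t thread;

	pthread_create(&thread, NULL, workload_thread, &gvt);

	/* Producer: queue two workloads, then let the thread drain and exit. */
	pthread_mutex_lock(&gvt.sched_lock);
	gvt.queue = &w1;
	gvt.stop = true;
	pthread_cond_signal(&gvt.waitq);
	pthread_mutex_unlock(&gvt.sched_lock);

	pthread_join(thread, NULL);
	return 0;
}
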
1183 struct intel_gvt *gvt = vgpu->gvt; in intel_gvt_wait_vgpu_idle() local
1184 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in intel_gvt_wait_vgpu_idle()
1194 void intel_gvt_clean_workload_scheduler(struct intel_gvt *gvt) in intel_gvt_clean_workload_scheduler() argument
1196 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in intel_gvt_clean_workload_scheduler()
1202 for_each_engine(engine, gvt->gt, i) { in intel_gvt_clean_workload_scheduler()
1205 &gvt->shadow_ctx_notifier_block[i]); in intel_gvt_clean_workload_scheduler()
1210 int intel_gvt_init_workload_scheduler(struct intel_gvt *gvt) in intel_gvt_init_workload_scheduler() argument
1212 struct intel_gvt_workload_scheduler *scheduler = &gvt->scheduler; in intel_gvt_init_workload_scheduler()
1221 for_each_engine(engine, gvt->gt, i) { in intel_gvt_init_workload_scheduler()
1232 gvt->shadow_ctx_notifier_block[i].notifier_call = in intel_gvt_init_workload_scheduler()
1235 &gvt->shadow_ctx_notifier_block[i]); in intel_gvt_init_workload_scheduler()
1241 intel_gvt_clean_workload_scheduler(gvt); in intel_gvt_init_workload_scheduler()
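
Lines 1194-1241 are the setup/teardown pair: intel_gvt_init_workload_scheduler() walks every engine, points gvt->shadow_ctx_notifier_block[i] at shadow_context_status_change(), registers it against the engine (the registration call itself is cut off in the listing), and falls back to intel_gvt_clean_workload_scheduler() if anything fails, which walks the engines again to unregister. A compact sketch of that register/unwind shape; a plain per-engine callback slot replaces the kernel's notifier-chain registration, and NUM_ENGINES, struct engine and the failure check are invented for the example.

#include <stdio.h>

#define NUM_ENGINES 4	/* invented for the sketch */

struct notifier_block {
	int (*notifier_call)(struct notifier_block *nb,
			     unsigned long action, void *data);
};

/* Stand-in engine: a bare callback slot instead of a notifier chain. */
struct engine {
	const char *name;
	struct notifier_block *status_notifier;
};

struct gvt {
	struct engine engines[NUM_ENGINES];
	struct notifier_block shadow_ctx_notifier_block[NUM_ENGINES];
};

static int shadow_context_status_change(struct notifier_block *nb,
					unsigned long action, void *data)
{
	(void)nb; (void)action; (void)data;
	return 0;
}

/* intel_gvt_clean_workload_scheduler() shape: walk every engine and drop
 * whatever init installed (1202-1205). */
static void clean_workload_scheduler(struct gvt *gvt)
{
	for (int i = 0; i < NUM_ENGINES; i++)
		gvt->engines[i].status_notifier = NULL;
}

/* intel_gvt_init_workload_scheduler() shape: hook every engine up to the
 * shadow-context status callback, unwinding through clean on failure
 * (1221-1241). The failure condition here is invented. */
static int init_workload_scheduler(struct gvt *gvt)
{
	for (int i = 0; i < NUM_ENGINES; i++) {
		if (!gvt->engines[i].name)
			goto err;
		gvt->shadow_ctx_notifier_block[i].notifier_call =
					shadow_context_status_change;
		gvt->engines[i].status_notifier =
					&gvt->shadow_ctx_notifier_block[i];
	}
	return 0;
err:
	clean_workload_scheduler(gvt);
	return -1;
}

int main(void)
{
	struct gvt gvt = { .engines = {
		{ .name = "rcs0" }, { .name = "bcs0" },
		{ .name = "vcs0" }, { .name = "vecs0" },
	} };

	printf("init: %d\n", init_workload_scheduler(&gvt));
	clean_workload_scheduler(&gvt);
	return 0;
}
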
1279 for_each_engine(engine, vgpu->gvt->gt, id) in intel_vgpu_clean_submission()
1336 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_setup_submission()
1349 for_each_engine(engine, vgpu->gvt->gt, i) { in intel_vgpu_setup_submission()
1399 for_each_engine(engine, vgpu->gvt->gt, i) { in intel_vgpu_setup_submission()
1425 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_select_submission_ops()
1739 intel_gvt_kick_schedule(workload->vgpu->gvt); in intel_vgpu_queue_workload()
1740 wake_up(&workload->vgpu->gvt->scheduler.waitq[workload->engine->id]); in intel_vgpu_queue_workload()
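
Finally, the intel_vgpu_queue_workload() hits at 1739-1740 are the producer side: presumably after the workload has been put on its engine's queue just above these lines, the scheduler is kicked via intel_gvt_kick_schedule() and only the wait queue for that engine, scheduler.waitq[engine->id], is woken. A sketch of that shape with a per-engine condition-variable array in place of the wait queues; the need_reschedule flag only stands in for whatever intel_gvt_kick_schedule() does internally, and the queue handling is a hand-rolled tail append.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

#define NUM_ENGINES 4	/* invented for the sketch */

struct workload { int id; int engine_id; struct workload *next; };

struct scheduler {
	pthread_mutex_t lock;			/* gvt->sched_lock stand-in */
	pthread_cond_t waitq[NUM_ENGINES];	/* scheduler.waitq[engine->id] */
	struct workload *queue[NUM_ENGINES];	/* per-engine pending work */
	bool need_reschedule;
};

/* Producer shaped like intel_vgpu_queue_workload(): append to the owning
 * engine's queue, flag a reschedule (standing in for
 * intel_gvt_kick_schedule()), and wake only that engine's wait queue. */
static void queue_workload(struct scheduler *s, struct workload *w)
{
	pthread_mutex_lock(&s->lock);

	/* tail append on the per-engine pending queue */
	struct workload **tail = &s->queue[w->engine_id];
	while (*tail)
		tail = &(*tail)->next;
	*tail = w;

	s->need_reschedule = true;

	/* wake_up(&scheduler.waitq[engine->id]) equivalent */
	pthread_cond_broadcast(&s->waitq[w->engine_id]);

	pthread_mutex_unlock(&s->lock);
}

int main(void)
{
	struct scheduler s = { .lock = PTHREAD_MUTEX_INITIALIZER };
	struct workload w = { .id = 7, .engine_id = 2 };

	for (int i = 0; i < NUM_ENGINES; i++)
		pthread_cond_init(&s.waitq[i], NULL);

	queue_workload(&s, &w);
	printf("engine %d queue head: workload %d\n",
	       w.engine_id, s.queue[w.engine_id]->id);
	return 0;
}

Indexing the wait queue by engine->id lets the producer wake just the thread that serves that engine, rather than every workload thread.
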