Lines matching refs: vgpu (cross-reference listing; each entry shows the source line number, the matching line, and the enclosing function)
74 static void read_vreg(struct intel_vgpu *vgpu, unsigned int offset, in read_vreg() argument
77 memcpy(p_data, &vgpu_vreg(vgpu, offset), bytes); in read_vreg()
80 static void write_vreg(struct intel_vgpu *vgpu, unsigned int offset, in write_vreg() argument
83 memcpy(&vgpu_vreg(vgpu, offset), p_data, bytes); in write_vreg()
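
read_vreg() and write_vreg() above are thin memcpy wrappers over the vGPU's virtual register file, so a guest MMIO access of any legal width lands at the matching byte offset of the shadow state. A minimal user-space sketch of the same pattern (the struct and its size are illustrative, not the kernel's):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative stand-in for the per-vGPU virtual register file. */
struct vgpu_model {
        uint8_t vreg[4096];     /* the kernel sizes this to the MMIO BAR */
};

/* Byte-granular shadow access, mirroring read_vreg()/write_vreg(). */
static void model_read_vreg(struct vgpu_model *v, unsigned int offset,
                            void *p_data, unsigned int bytes)
{
        memcpy(p_data, &v->vreg[offset], bytes);
}

static void model_write_vreg(struct vgpu_model *v, unsigned int offset,
                             const void *p_data, unsigned int bytes)
{
        memcpy(&v->vreg[offset], p_data, bytes);
}

int main(void)
{
        struct vgpu_model v = {0};
        uint32_t in = 0xdeadbeef, out = 0;

        model_write_vreg(&v, 0x100, &in, sizeof(in));
        model_read_vreg(&v, 0x100, &out, sizeof(out));
        printf("0x%08x\n", out);        /* 0xdeadbeef */
        return 0;
}
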
175 void enter_failsafe_mode(struct intel_vgpu *vgpu, int reason) in enter_failsafe_mode() argument
190 pr_err("Now vgpu %d will enter failsafe mode.\n", vgpu->id); in enter_failsafe_mode()
191 vgpu->failsafe = true; in enter_failsafe_mode()
194 static int sanitize_fence_mmio_access(struct intel_vgpu *vgpu, in sanitize_fence_mmio_access() argument
197 unsigned int max_fence = vgpu_fence_sz(vgpu); in sanitize_fence_mmio_access()
207 if (!vgpu->pv_notified) in sanitize_fence_mmio_access()
208 enter_failsafe_mode(vgpu, in sanitize_fence_mmio_access()
217 static int gamw_echo_dev_rw_ia_write(struct intel_vgpu *vgpu, in gamw_echo_dev_rw_ia_write() argument
222 if (INTEL_GEN(vgpu->gvt->gt->i915) <= 10) { in gamw_echo_dev_rw_ia_write()
224 gvt_dbg_core("vgpu%d: ips enabled\n", vgpu->id); in gamw_echo_dev_rw_ia_write()
226 gvt_dbg_core("vgpu%d: ips disabled\n", vgpu->id); in gamw_echo_dev_rw_ia_write()
238 write_vreg(vgpu, offset, p_data, bytes); in gamw_echo_dev_rw_ia_write()
242 static int fence_mmio_read(struct intel_vgpu *vgpu, unsigned int off, in fence_mmio_read() argument
247 ret = sanitize_fence_mmio_access(vgpu, offset_to_fence_num(off), in fence_mmio_read()
251 read_vreg(vgpu, off, p_data, bytes); in fence_mmio_read()
255 static int fence_mmio_write(struct intel_vgpu *vgpu, unsigned int off, in fence_mmio_write() argument
258 struct intel_gvt *gvt = vgpu->gvt; in fence_mmio_write()
262 ret = sanitize_fence_mmio_access(vgpu, fence_num, p_data, bytes); in fence_mmio_write()
265 write_vreg(vgpu, off, p_data, bytes); in fence_mmio_write()
268 intel_vgpu_write_fence(vgpu, fence_num, in fence_mmio_write()
269 vgpu_vreg64(vgpu, fence_num_to_offset(fence_num))); in fence_mmio_write()
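
fence_mmio_write() shows the common handler shape: sanitize the access first (sanitize_fence_mmio_access() pushes the vGPU into failsafe mode if an unnotified guest touches fences beyond its allocation), shadow the value with write_vreg(), then propagate the full 64-bit fence to hardware via intel_vgpu_write_fence(). A sketch of that validate/shadow/propagate flow; MAX_FENCE and hw_write_fence() are stand-ins, not kernel names:

#include <stdint.h>
#include <stdio.h>

#define MAX_FENCE 8     /* illustrative; the kernel derives this from vgpu_fence_sz() */

static uint64_t shadow_fence[MAX_FENCE];

/* Hypothetical hardware hook standing in for intel_vgpu_write_fence(). */
static void hw_write_fence(unsigned int n, uint64_t val)
{
        printf("fence[%u] <- 0x%016llx\n", n, (unsigned long long)val);
}

/* Mirrors the fence_mmio_write() shape: validate, shadow, then propagate. */
static int model_fence_write(unsigned int fence_num, uint64_t val)
{
        if (fence_num >= MAX_FENCE)
                return -1;      /* the kernel enters failsafe mode here */
        shadow_fence[fence_num] = val;
        hw_write_fence(fence_num, shadow_fence[fence_num]);
        return 0;
}

int main(void)
{
        return model_fence_write(2, 0x1234000000005678ull);
}
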
279 static int mul_force_wake_write(struct intel_vgpu *vgpu, in mul_force_wake_write() argument
285 old = vgpu_vreg(vgpu, offset); in mul_force_wake_write()
288 if (INTEL_GEN(vgpu->gvt->gt->i915) >= 9) { in mul_force_wake_write()
308 vgpu_vreg(vgpu, offset) = new; in mul_force_wake_write()
309 vgpu_vreg(vgpu, ack_reg_offset) = (new & GENMASK(15, 0)); in mul_force_wake_write()
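
mul_force_wake_write() never touches real forcewake: it stores the guest's request and immediately mirrors the low 16 bits (GENMASK(15, 0)) into the corresponding ack register, so the guest's ack-polling loop terminates at once. A sketch, with the register pair modeled as two plain variables:

#include <stdint.h>
#include <assert.h>

static uint32_t forcewake_req, forcewake_ack;   /* illustrative register pair */

/* Mirrors the mul_force_wake_write() tail: commit the request, then
 * reflect its low 16 bits into the ack register so guest polling
 * completes immediately. */
static void model_forcewake_write(uint32_t val)
{
        forcewake_req = val;
        forcewake_ack = val & 0xffffu;  /* GENMASK(15, 0) in the kernel */
}

int main(void)
{
        model_forcewake_write(0x00010001);
        assert(forcewake_req == 0x00010001);
        assert((forcewake_ack & 0x1) == 0x1);   /* guest sees the wake acked */
        return 0;
}
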
313 static int gdrst_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in gdrst_mmio_write() argument
319 write_vreg(vgpu, offset, p_data, bytes); in gdrst_mmio_write()
320 data = vgpu_vreg(vgpu, offset); in gdrst_mmio_write()
323 gvt_dbg_mmio("vgpu%d: request full GPU reset\n", vgpu->id); in gdrst_mmio_write()
327 gvt_dbg_mmio("vgpu%d: request RCS reset\n", vgpu->id); in gdrst_mmio_write()
331 gvt_dbg_mmio("vgpu%d: request VCS reset\n", vgpu->id); in gdrst_mmio_write()
335 gvt_dbg_mmio("vgpu%d: request BCS Reset\n", vgpu->id); in gdrst_mmio_write()
339 gvt_dbg_mmio("vgpu%d: request VECS Reset\n", vgpu->id); in gdrst_mmio_write()
343 gvt_dbg_mmio("vgpu%d: request VCS2 Reset\n", vgpu->id); in gdrst_mmio_write()
347 gvt_dbg_mmio("vgpu%d: request GUC Reset\n", vgpu->id); in gdrst_mmio_write()
348 vgpu_vreg_t(vgpu, GUC_STATUS) |= GS_MIA_IN_RESET; in gdrst_mmio_write()
350 engine_mask &= vgpu->gvt->gt->info.engine_mask; in gdrst_mmio_write()
354 intel_gvt_reset_vgpu_locked(vgpu, false, engine_mask); in gdrst_mmio_write()
357 vgpu_vreg(vgpu, offset) = 0; in gdrst_mmio_write()
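
gdrst_mmio_write() decodes the guest's reset-domain bits into an engine mask, clamps it against the engines the device actually has, hands it to intel_gvt_reset_vgpu_locked(), and finally clears the register so the guest's completion poll succeeds. A sketch of the decode step; the GRDOM_*/ENGINE_* constants below are placeholders for the kernel's GEN6_GRDOM_* bits and engine mask, not the real definitions:

#include <stdint.h>
#include <stdio.h>

/* Placeholder domain bits (kernel: GEN6_GRDOM_*). */
#define GRDOM_FULL   (1u << 0)
#define GRDOM_RENDER (1u << 1)
#define GRDOM_MEDIA  (1u << 2)
#define GRDOM_BLT    (1u << 3)

#define ENGINE_RCS  (1u << 0)
#define ENGINE_VCS  (1u << 1)
#define ENGINE_BCS  (1u << 2)
#define ALL_ENGINES (ENGINE_RCS | ENGINE_VCS | ENGINE_BCS)

/* Mirrors the gdrst_mmio_write() decode: translate reset-domain bits
 * into an engine mask for the per-engine or full vGPU reset. */
static uint32_t decode_reset(uint32_t data)
{
        uint32_t engine_mask = 0;

        if (data & GRDOM_FULL)
                engine_mask = ALL_ENGINES;
        if (data & GRDOM_RENDER)
                engine_mask |= ENGINE_RCS;
        if (data & GRDOM_MEDIA)
                engine_mask |= ENGINE_VCS;
        if (data & GRDOM_BLT)
                engine_mask |= ENGINE_BCS;
        return engine_mask;
}

int main(void)
{
        printf("mask=0x%x\n", decode_reset(GRDOM_RENDER | GRDOM_BLT));
        return 0;
}
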
362 static int gmbus_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in gmbus_mmio_read() argument
365 return intel_gvt_i2c_handle_gmbus_read(vgpu, offset, p_data, bytes); in gmbus_mmio_read()
368 static int gmbus_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in gmbus_mmio_write() argument
371 return intel_gvt_i2c_handle_gmbus_write(vgpu, offset, p_data, bytes); in gmbus_mmio_write()
374 static int pch_pp_control_mmio_write(struct intel_vgpu *vgpu, in pch_pp_control_mmio_write() argument
377 write_vreg(vgpu, offset, p_data, bytes); in pch_pp_control_mmio_write()
379 if (vgpu_vreg(vgpu, offset) & PANEL_POWER_ON) { in pch_pp_control_mmio_write()
380 vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_ON; in pch_pp_control_mmio_write()
381 vgpu_vreg_t(vgpu, PCH_PP_STATUS) |= PP_SEQUENCE_STATE_ON_IDLE; in pch_pp_control_mmio_write()
382 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_SEQUENCE_POWER_DOWN; in pch_pp_control_mmio_write()
383 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= ~PP_CYCLE_DELAY_ACTIVE; in pch_pp_control_mmio_write()
386 vgpu_vreg_t(vgpu, PCH_PP_STATUS) &= in pch_pp_control_mmio_write()
392 static int transconf_mmio_write(struct intel_vgpu *vgpu, in transconf_mmio_write() argument
395 write_vreg(vgpu, offset, p_data, bytes); in transconf_mmio_write()
397 if (vgpu_vreg(vgpu, offset) & TRANS_ENABLE) in transconf_mmio_write()
398 vgpu_vreg(vgpu, offset) |= TRANS_STATE_ENABLE; in transconf_mmio_write()
400 vgpu_vreg(vgpu, offset) &= ~TRANS_STATE_ENABLE; in transconf_mmio_write()
404 static int lcpll_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in lcpll_ctl_mmio_write() argument
407 write_vreg(vgpu, offset, p_data, bytes); in lcpll_ctl_mmio_write()
409 if (vgpu_vreg(vgpu, offset) & LCPLL_PLL_DISABLE) in lcpll_ctl_mmio_write()
410 vgpu_vreg(vgpu, offset) &= ~LCPLL_PLL_LOCK; in lcpll_ctl_mmio_write()
412 vgpu_vreg(vgpu, offset) |= LCPLL_PLL_LOCK; in lcpll_ctl_mmio_write()
414 if (vgpu_vreg(vgpu, offset) & LCPLL_CD_SOURCE_FCLK) in lcpll_ctl_mmio_write()
415 vgpu_vreg(vgpu, offset) |= LCPLL_CD_SOURCE_FCLK_DONE; in lcpll_ctl_mmio_write()
417 vgpu_vreg(vgpu, offset) &= ~LCPLL_CD_SOURCE_FCLK_DONE; in lcpll_ctl_mmio_write()
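
transconf_mmio_write() and lcpll_ctl_mmio_write() are both instances of the "status tracks control" idiom: after committing the guest's write, the handler derives the status bits the guest will poll (TRANS_STATE_ENABLE, LCPLL_PLL_LOCK, LCPLL_CD_SOURCE_FCLK_DONE), so the emulated state machine completes instantly. A sketch using placeholder bit definitions:

#include <stdint.h>
#include <stdio.h>

/* Placeholder bit positions, for illustration only. */
#define PLL_DISABLE (1u << 31)
#define PLL_LOCK    (1u << 30)

/* Mirrors lcpll_ctl_mmio_write(): once the guest writes the control
 * bit, immediately fake the status bit it will poll for. */
static uint32_t model_lcpll_write(uint32_t reg)
{
        if (reg & PLL_DISABLE)
                reg &= ~PLL_LOCK;       /* disabled PLL reports unlocked */
        else
                reg |= PLL_LOCK;        /* enabled PLL locks instantly in emulation */
        return reg;
}

int main(void)
{
        printf("0x%08x\n", model_lcpll_write(0));               /* lock set */
        printf("0x%08x\n", model_lcpll_write(PLL_DISABLE));     /* lock clear */
        return 0;
}
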
422 static int dpy_reg_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in dpy_reg_mmio_read() argument
430 vgpu_vreg(vgpu, offset) = 1 << 17; in dpy_reg_mmio_read()
433 vgpu_vreg(vgpu, offset) = 0x3; in dpy_reg_mmio_read()
436 vgpu_vreg(vgpu, offset) = 0x2f << 16; in dpy_reg_mmio_read()
442 read_vreg(vgpu, offset, p_data, bytes); in dpy_reg_mmio_read()
446 static int pipeconf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pipeconf_mmio_write() argument
451 write_vreg(vgpu, offset, p_data, bytes); in pipeconf_mmio_write()
452 data = vgpu_vreg(vgpu, offset); in pipeconf_mmio_write()
455 vgpu_vreg(vgpu, offset) |= I965_PIPECONF_ACTIVE; in pipeconf_mmio_write()
457 vgpu_vreg(vgpu, offset) &= ~I965_PIPECONF_ACTIVE; in pipeconf_mmio_write()
459 mutex_unlock(&vgpu->vgpu_lock); in pipeconf_mmio_write()
460 intel_gvt_check_vblank_emulation(vgpu->gvt); in pipeconf_mmio_write()
461 mutex_lock(&vgpu->vgpu_lock); in pipeconf_mmio_write()
519 static int force_nonpriv_write(struct intel_vgpu *vgpu, in force_nonpriv_write() argument
524 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in force_nonpriv_write()
528 vgpu->id, offset, bytes); in force_nonpriv_write()
535 vgpu->id, reg_nonpriv, offset); in force_nonpriv_write()
537 intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes); in force_nonpriv_write()
542 static int ddi_buf_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in ddi_buf_ctl_mmio_write() argument
545 write_vreg(vgpu, offset, p_data, bytes); in ddi_buf_ctl_mmio_write()
547 if (vgpu_vreg(vgpu, offset) & DDI_BUF_CTL_ENABLE) { in ddi_buf_ctl_mmio_write()
548 vgpu_vreg(vgpu, offset) &= ~DDI_BUF_IS_IDLE; in ddi_buf_ctl_mmio_write()
550 vgpu_vreg(vgpu, offset) |= DDI_BUF_IS_IDLE; in ddi_buf_ctl_mmio_write()
552 vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) in ddi_buf_ctl_mmio_write()
558 static int fdi_rx_iir_mmio_write(struct intel_vgpu *vgpu, in fdi_rx_iir_mmio_write() argument
561 vgpu_vreg(vgpu, offset) &= ~*(u32 *)p_data; in fdi_rx_iir_mmio_write()
568 static int fdi_auto_training_started(struct intel_vgpu *vgpu) in fdi_auto_training_started() argument
570 u32 ddi_buf_ctl = vgpu_vreg_t(vgpu, DDI_BUF_CTL(PORT_E)); in fdi_auto_training_started()
571 u32 rx_ctl = vgpu_vreg(vgpu, _FDI_RXA_CTL); in fdi_auto_training_started()
572 u32 tx_ctl = vgpu_vreg_t(vgpu, DP_TP_CTL(PORT_E)); in fdi_auto_training_started()
584 static int check_fdi_rx_train_status(struct intel_vgpu *vgpu, in check_fdi_rx_train_status() argument
613 if (vgpu_vreg_t(vgpu, fdi_rx_imr) & fdi_iir_check_bits) in check_fdi_rx_train_status()
616 if (((vgpu_vreg_t(vgpu, fdi_tx_ctl) & fdi_tx_check_bits) in check_fdi_rx_train_status()
618 && ((vgpu_vreg_t(vgpu, fdi_rx_ctl) & fdi_rx_check_bits) in check_fdi_rx_train_status()
649 static int update_fdi_rx_iir_status(struct intel_vgpu *vgpu, in update_fdi_rx_iir_status() argument
667 write_vreg(vgpu, offset, p_data, bytes); in update_fdi_rx_iir_status()
671 ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN1); in update_fdi_rx_iir_status()
675 vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_BIT_LOCK; in update_fdi_rx_iir_status()
677 ret = check_fdi_rx_train_status(vgpu, index, FDI_LINK_TRAIN_PATTERN2); in update_fdi_rx_iir_status()
681 vgpu_vreg_t(vgpu, fdi_rx_iir) |= FDI_RX_SYMBOL_LOCK; in update_fdi_rx_iir_status()
684 if (fdi_auto_training_started(vgpu)) in update_fdi_rx_iir_status()
685 vgpu_vreg_t(vgpu, DP_TP_STATUS(PORT_E)) |= in update_fdi_rx_iir_status()
693 static int dp_tp_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in dp_tp_ctl_mmio_write() argument
700 write_vreg(vgpu, offset, p_data, bytes); in dp_tp_ctl_mmio_write()
703 data = (vgpu_vreg(vgpu, offset) & GENMASK(10, 8)) >> 8; in dp_tp_ctl_mmio_write()
706 vgpu_vreg_t(vgpu, status_reg) |= (1 << 25); in dp_tp_ctl_mmio_write()
711 static int dp_tp_status_mmio_write(struct intel_vgpu *vgpu, in dp_tp_status_mmio_write() argument
720 vgpu_vreg(vgpu, offset) = (reg_val & ~sticky_mask) | in dp_tp_status_mmio_write()
721 (vgpu_vreg(vgpu, offset) & sticky_mask); in dp_tp_status_mmio_write()
722 vgpu_vreg(vgpu, offset) &= ~(reg_val & sticky_mask); in dp_tp_status_mmio_write()
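
dp_tp_status_mmio_write() splits the register into plain bits, which take the written value directly, and sticky bits, which are write-1-to-clear: a sticky bit keeps its old value unless the guest writes 1 to it, which clears it. The same two-step update, extracted into a pure function:

#include <stdint.h>
#include <assert.h>

/* Mirrors the update in dp_tp_status_mmio_write(): plain bits are
 * overwritten; sticky bits are write-1-to-clear. */
static uint32_t w1c_update(uint32_t cur, uint32_t written, uint32_t sticky_mask)
{
        uint32_t v;

        v = (written & ~sticky_mask) | (cur & sticky_mask); /* keep sticky bits */
        v &= ~(written & sticky_mask);                      /* writing 1 clears them */
        return v;
}

int main(void)
{
        const uint32_t STICKY = 1u << 25;       /* illustrative bit position */

        /* Writing 0 to a set sticky bit leaves it set... */
        assert(w1c_update(STICKY, 0, STICKY) == STICKY);
        /* ...writing 1 clears it. */
        assert(w1c_update(STICKY, STICKY, STICKY) == 0);
        return 0;
}
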
726 static int pch_adpa_mmio_write(struct intel_vgpu *vgpu, in pch_adpa_mmio_write() argument
731 write_vreg(vgpu, offset, p_data, bytes); in pch_adpa_mmio_write()
732 data = vgpu_vreg(vgpu, offset); in pch_adpa_mmio_write()
735 vgpu_vreg(vgpu, offset) &= ~ADPA_CRT_HOTPLUG_FORCE_TRIGGER; in pch_adpa_mmio_write()
739 static int south_chicken2_mmio_write(struct intel_vgpu *vgpu, in south_chicken2_mmio_write() argument
744 write_vreg(vgpu, offset, p_data, bytes); in south_chicken2_mmio_write()
745 data = vgpu_vreg(vgpu, offset); in south_chicken2_mmio_write()
748 vgpu_vreg(vgpu, offset) |= FDI_MPHY_IOSFSB_RESET_STATUS; in south_chicken2_mmio_write()
750 vgpu_vreg(vgpu, offset) &= ~FDI_MPHY_IOSFSB_RESET_STATUS; in south_chicken2_mmio_write()
757 static int pri_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pri_surf_mmio_write() argument
760 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in pri_surf_mmio_write()
764 write_vreg(vgpu, offset, p_data, bytes); in pri_surf_mmio_write()
765 vgpu_vreg_t(vgpu, DSPSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in pri_surf_mmio_write()
767 vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(pipe))++; in pri_surf_mmio_write()
769 if (vgpu_vreg_t(vgpu, DSPCNTR(pipe)) & PLANE_CTL_ASYNC_FLIP) in pri_surf_mmio_write()
770 intel_vgpu_trigger_virtual_event(vgpu, event); in pri_surf_mmio_write()
772 set_bit(event, vgpu->irq.flip_done_event[pipe]); in pri_surf_mmio_write()
780 static int spr_surf_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in spr_surf_mmio_write() argument
786 write_vreg(vgpu, offset, p_data, bytes); in spr_surf_mmio_write()
787 vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in spr_surf_mmio_write()
789 if (vgpu_vreg_t(vgpu, SPRCTL(pipe)) & PLANE_CTL_ASYNC_FLIP) in spr_surf_mmio_write()
790 intel_vgpu_trigger_virtual_event(vgpu, event); in spr_surf_mmio_write()
792 set_bit(event, vgpu->irq.flip_done_event[pipe]); in spr_surf_mmio_write()
797 static int reg50080_mmio_write(struct intel_vgpu *vgpu, in reg50080_mmio_write() argument
801 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in reg50080_mmio_write()
806 write_vreg(vgpu, offset, p_data, bytes); in reg50080_mmio_write()
808 vgpu_vreg_t(vgpu, DSPSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in reg50080_mmio_write()
809 vgpu_vreg_t(vgpu, PIPE_FLIPCOUNT_G4X(pipe))++; in reg50080_mmio_write()
811 vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset); in reg50080_mmio_write()
814 if ((vgpu_vreg(vgpu, offset) & REG50080_FLIP_TYPE_MASK) == REG50080_FLIP_TYPE_ASYNC) in reg50080_mmio_write()
815 intel_vgpu_trigger_virtual_event(vgpu, event); in reg50080_mmio_write()
817 set_bit(event, vgpu->irq.flip_done_event[pipe]); in reg50080_mmio_write()
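
The three flip handlers (pri_surf, spr_surf, reg50080) share one tail: update the *SURFLIVE shadow and the flip counter, then either inject the flip-done event immediately for an async flip or latch it in irq.flip_done_event[] for later delivery (by the vblank emulation, as I read the surrounding code). A sketch of that async/sync split:

#include <stdbool.h>
#include <stdio.h>

static unsigned long flip_done_event[3];        /* one word per pipe, illustrative */

static void trigger_event(int event)
{
        printf("inject flip-done event %d\n", event);
}

/* Mirrors the handlers' tail: async flips signal the guest at once;
 * sync flips are latched and delivered later. */
static void model_flip(int pipe, int event, bool async)
{
        if (async)
                trigger_event(event);
        else
                flip_done_event[pipe] |= 1ul << event;
}

int main(void)
{
        model_flip(0, 3, true);         /* async: immediate */
        model_flip(0, 3, false);        /* sync: deferred */
        printf("pending=0x%lx\n", flip_done_event[0]);
        return 0;
}
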
822 static int trigger_aux_channel_interrupt(struct intel_vgpu *vgpu, in trigger_aux_channel_interrupt() argument
825 struct drm_i915_private *dev_priv = vgpu->gvt->gt->i915; in trigger_aux_channel_interrupt()
844 intel_vgpu_trigger_virtual_event(vgpu, event); in trigger_aux_channel_interrupt()
848 static int dp_aux_ch_ctl_trans_done(struct intel_vgpu *vgpu, u32 value, in dp_aux_ch_ctl_trans_done() argument
864 vgpu_vreg(vgpu, reg) = value; in dp_aux_ch_ctl_trans_done()
867 return trigger_aux_channel_interrupt(vgpu, reg); in dp_aux_ch_ctl_trans_done()
910 static int dp_aux_ch_ctl_mmio_write(struct intel_vgpu *vgpu, in dp_aux_ch_ctl_mmio_write() argument
913 struct intel_vgpu_display *display = &vgpu->display; in dp_aux_ch_ctl_mmio_write()
925 write_vreg(vgpu, offset, p_data, bytes); in dp_aux_ch_ctl_mmio_write()
926 data = vgpu_vreg(vgpu, offset); in dp_aux_ch_ctl_mmio_write()
928 if ((INTEL_GEN(vgpu->gvt->gt->i915) >= 9) in dp_aux_ch_ctl_mmio_write()
932 } else if (IS_BROADWELL(vgpu->gvt->gt->i915) && in dp_aux_ch_ctl_mmio_write()
940 vgpu_vreg(vgpu, offset) = 0; in dp_aux_ch_ctl_mmio_write()
948 msg = vgpu_vreg(vgpu, offset + 4); in dp_aux_ch_ctl_mmio_write()
968 vgpu_vreg(vgpu, offset + 4) = AUX_NATIVE_REPLY_NAK; in dp_aux_ch_ctl_mmio_write()
969 dp_aux_ch_ctl_trans_done(vgpu, data, offset, 2, true); in dp_aux_ch_ctl_mmio_write()
985 u32 r = vgpu_vreg(vgpu, offset + 8 + t * 4); in dp_aux_ch_ctl_mmio_write()
1007 vgpu_vreg(vgpu, offset + 4) = 0; in dp_aux_ch_ctl_mmio_write()
1008 dp_aux_ch_ctl_trans_done(vgpu, data, offset, 1, in dp_aux_ch_ctl_mmio_write()
1026 vgpu_vreg(vgpu, offset + 4) = 0; in dp_aux_ch_ctl_mmio_write()
1027 vgpu_vreg(vgpu, offset + 8) = 0; in dp_aux_ch_ctl_mmio_write()
1028 vgpu_vreg(vgpu, offset + 12) = 0; in dp_aux_ch_ctl_mmio_write()
1029 vgpu_vreg(vgpu, offset + 16) = 0; in dp_aux_ch_ctl_mmio_write()
1030 vgpu_vreg(vgpu, offset + 20) = 0; in dp_aux_ch_ctl_mmio_write()
1032 dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2, in dp_aux_ch_ctl_mmio_write()
1039 vgpu_vreg(vgpu, offset + 4 * idx) = 0; in dp_aux_ch_ctl_mmio_write()
1061 vgpu_vreg(vgpu, offset + in dp_aux_ch_ctl_mmio_write()
1067 dp_aux_ch_ctl_trans_done(vgpu, data, offset, len + 2, in dp_aux_ch_ctl_mmio_write()
1073 intel_gvt_i2c_handle_aux_ch_write(vgpu, port_index, offset, p_data); in dp_aux_ch_ctl_mmio_write()
1076 trigger_aux_channel_interrupt(vgpu, offset); in dp_aux_ch_ctl_mmio_write()
1080 static int mbctl_write(struct intel_vgpu *vgpu, unsigned int offset, in mbctl_write() argument
1084 write_vreg(vgpu, offset, p_data, bytes); in mbctl_write()
1088 static int vga_control_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in vga_control_mmio_write() argument
1093 write_vreg(vgpu, offset, p_data, bytes); in vga_control_mmio_write()
1094 vga_disable = vgpu_vreg(vgpu, offset) & VGA_DISP_DISABLE; in vga_control_mmio_write()
1096 gvt_dbg_core("vgpu%d: %s VGA mode\n", vgpu->id, in vga_control_mmio_write()
1101 static u32 read_virtual_sbi_register(struct intel_vgpu *vgpu, in read_virtual_sbi_register() argument
1104 struct intel_vgpu_display *display = &vgpu->display; in read_virtual_sbi_register()
1118 static void write_virtual_sbi_register(struct intel_vgpu *vgpu, in write_virtual_sbi_register() argument
1121 struct intel_vgpu_display *display = &vgpu->display; in write_virtual_sbi_register()
1142 static int sbi_data_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in sbi_data_mmio_read() argument
1145 if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >> in sbi_data_mmio_read()
1147 unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) & in sbi_data_mmio_read()
1149 vgpu_vreg(vgpu, offset) = read_virtual_sbi_register(vgpu, in sbi_data_mmio_read()
1152 read_vreg(vgpu, offset, p_data, bytes); in sbi_data_mmio_read()
1156 static int sbi_ctl_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in sbi_ctl_mmio_write() argument
1161 write_vreg(vgpu, offset, p_data, bytes); in sbi_ctl_mmio_write()
1162 data = vgpu_vreg(vgpu, offset); in sbi_ctl_mmio_write()
1170 vgpu_vreg(vgpu, offset) = data; in sbi_ctl_mmio_write()
1172 if (((vgpu_vreg_t(vgpu, SBI_CTL_STAT) & SBI_OPCODE_MASK) >> in sbi_ctl_mmio_write()
1174 unsigned int sbi_offset = (vgpu_vreg_t(vgpu, SBI_ADDR) & in sbi_ctl_mmio_write()
1177 write_virtual_sbi_register(vgpu, sbi_offset, in sbi_ctl_mmio_write()
1178 vgpu_vreg_t(vgpu, SBI_DATA)); in sbi_ctl_mmio_write()
1186 static int pvinfo_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in pvinfo_mmio_read() argument
1191 read_vreg(vgpu, offset, p_data, bytes); in pvinfo_mmio_read()
1214 vgpu->pv_notified = true; in pvinfo_mmio_read()
1218 static int handle_g2v_notification(struct intel_vgpu *vgpu, int notification) in handle_g2v_notification() argument
1224 pdps = (u64 *)&vgpu_vreg64_t(vgpu, vgtif_reg(pdp[0])); in handle_g2v_notification()
1231 mm = intel_vgpu_get_ppgtt_mm(vgpu, root_entry_type, pdps); in handle_g2v_notification()
1235 return intel_vgpu_put_ppgtt_mm(vgpu, pdps); in handle_g2v_notification()
1246 static int send_display_ready_uevent(struct intel_vgpu *vgpu, int ready) in send_display_ready_uevent() argument
1248 struct kobject *kobj = &vgpu->gvt->gt->i915->drm.primary->kdev->kobj; in send_display_ready_uevent()
1256 snprintf(vmid_str, 20, "VMID=%d", vgpu->id); in send_display_ready_uevent()
1262 static int pvinfo_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in pvinfo_mmio_write() argument
1270 send_display_ready_uevent(vgpu, data ? 1 : 0); in pvinfo_mmio_write()
1273 handle_g2v_notification(vgpu, data); in pvinfo_mmio_write()
1291 enter_failsafe_mode(vgpu, GVT_FAILSAFE_INSUFFICIENT_RESOURCE); in pvinfo_mmio_write()
1301 write_vreg(vgpu, offset, p_data, bytes); in pvinfo_mmio_write()
1306 static int pf_write(struct intel_vgpu *vgpu, in pf_write() argument
1309 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in pf_write()
1317 vgpu->id); in pf_write()
1321 return intel_vgpu_default_mmio_write(vgpu, offset, p_data, bytes); in pf_write()
1324 static int power_well_ctl_mmio_write(struct intel_vgpu *vgpu, in power_well_ctl_mmio_write() argument
1327 write_vreg(vgpu, offset, p_data, bytes); in power_well_ctl_mmio_write()
1329 if (vgpu_vreg(vgpu, offset) & in power_well_ctl_mmio_write()
1331 vgpu_vreg(vgpu, offset) |= in power_well_ctl_mmio_write()
1334 vgpu_vreg(vgpu, offset) &= in power_well_ctl_mmio_write()
1339 static int gen9_dbuf_ctl_mmio_write(struct intel_vgpu *vgpu, in gen9_dbuf_ctl_mmio_write() argument
1342 write_vreg(vgpu, offset, p_data, bytes); in gen9_dbuf_ctl_mmio_write()
1344 if (vgpu_vreg(vgpu, offset) & DBUF_POWER_REQUEST) in gen9_dbuf_ctl_mmio_write()
1345 vgpu_vreg(vgpu, offset) |= DBUF_POWER_STATE; in gen9_dbuf_ctl_mmio_write()
1347 vgpu_vreg(vgpu, offset) &= ~DBUF_POWER_STATE; in gen9_dbuf_ctl_mmio_write()
1352 static int fpga_dbg_mmio_write(struct intel_vgpu *vgpu, in fpga_dbg_mmio_write() argument
1355 write_vreg(vgpu, offset, p_data, bytes); in fpga_dbg_mmio_write()
1357 if (vgpu_vreg(vgpu, offset) & FPGA_DBG_RM_NOCLAIM) in fpga_dbg_mmio_write()
1358 vgpu_vreg(vgpu, offset) &= ~FPGA_DBG_RM_NOCLAIM; in fpga_dbg_mmio_write()
1362 static int dma_ctrl_write(struct intel_vgpu *vgpu, unsigned int offset, in dma_ctrl_write() argument
1365 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in dma_ctrl_write()
1368 write_vreg(vgpu, offset, p_data, bytes); in dma_ctrl_write()
1369 mode = vgpu_vreg(vgpu, offset); in dma_ctrl_write()
1374 vgpu->id); in dma_ctrl_write()
1381 static int gen9_trtte_write(struct intel_vgpu *vgpu, unsigned int offset, in gen9_trtte_write() argument
1384 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in gen9_trtte_write()
1390 vgpu->id); in gen9_trtte_write()
1393 write_vreg(vgpu, offset, p_data, bytes); in gen9_trtte_write()
1398 static int gen9_trtt_chicken_write(struct intel_vgpu *vgpu, unsigned int offset, in gen9_trtt_chicken_write() argument
1401 write_vreg(vgpu, offset, p_data, bytes); in gen9_trtt_chicken_write()
1405 static int dpll_status_read(struct intel_vgpu *vgpu, unsigned int offset, in dpll_status_read() argument
1410 if (vgpu_vreg(vgpu, 0x46010) & (1 << 31)) in dpll_status_read()
1413 if (vgpu_vreg(vgpu, 0x46014) & (1 << 31)) in dpll_status_read()
1416 if (vgpu_vreg(vgpu, 0x46040) & (1 << 31)) in dpll_status_read()
1419 if (vgpu_vreg(vgpu, 0x46060) & (1 << 31)) in dpll_status_read()
1422 vgpu_vreg(vgpu, offset) = v; in dpll_status_read()
1424 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in dpll_status_read()
1427 static int mailbox_write(struct intel_vgpu *vgpu, unsigned int offset, in mailbox_write() argument
1432 u32 *data0 = &vgpu_vreg_t(vgpu, GEN6_PCODE_DATA); in mailbox_write()
1436 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1437 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1438 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1439 IS_COMETLAKE(vgpu->gvt->gt->i915)) { in mailbox_write()
1449 } else if (IS_BROXTON(vgpu->gvt->gt->i915)) { in mailbox_write()
1462 if (IS_SKYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1463 IS_KABYLAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1464 IS_COFFEELAKE(vgpu->gvt->gt->i915) || in mailbox_write()
1465 IS_COMETLAKE(vgpu->gvt->gt->i915)) in mailbox_write()
1474 vgpu->id, value, *data0); in mailbox_write()
1482 return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes); in mailbox_write()
1485 static int hws_pga_write(struct intel_vgpu *vgpu, unsigned int offset, in hws_pga_write() argument
1490 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in hws_pga_write()
1493 !intel_gvt_ggtt_validate_range(vgpu, value, I915_GTT_PAGE_SIZE)) { in hws_pga_write()
1509 vgpu->hws_pga[engine->id] = value; in hws_pga_write()
1511 vgpu->id, value, offset); in hws_pga_write()
1513 return intel_vgpu_default_mmio_write(vgpu, offset, &value, bytes); in hws_pga_write()
1516 static int skl_power_well_ctl_write(struct intel_vgpu *vgpu, in skl_power_well_ctl_write() argument
1521 if (IS_BROXTON(vgpu->gvt->gt->i915)) in skl_power_well_ctl_write()
1528 return intel_vgpu_default_mmio_write(vgpu, offset, &v, bytes); in skl_power_well_ctl_write()
1531 static int skl_lcpll_write(struct intel_vgpu *vgpu, unsigned int offset, in skl_lcpll_write() argument
1540 vgpu_vreg(vgpu, offset) = v; in skl_lcpll_write()
1545 static int bxt_de_pll_enable_write(struct intel_vgpu *vgpu, in bxt_de_pll_enable_write() argument
1553 vgpu_vreg(vgpu, offset) = v; in bxt_de_pll_enable_write()
1558 static int bxt_port_pll_enable_write(struct intel_vgpu *vgpu, in bxt_port_pll_enable_write() argument
1566 vgpu_vreg(vgpu, offset) = v; in bxt_port_pll_enable_write()
1571 static int bxt_phy_ctl_family_write(struct intel_vgpu *vgpu, in bxt_phy_ctl_family_write() argument
1579 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_A) = data; in bxt_phy_ctl_family_write()
1582 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_B) = data; in bxt_phy_ctl_family_write()
1583 vgpu_vreg(vgpu, _BXT_PHY_CTL_DDI_C) = data; in bxt_phy_ctl_family_write()
1587 vgpu_vreg(vgpu, offset) = v; in bxt_phy_ctl_family_write()
1592 static int bxt_port_tx_dw3_read(struct intel_vgpu *vgpu, in bxt_port_tx_dw3_read() argument
1595 u32 v = vgpu_vreg(vgpu, offset); in bxt_port_tx_dw3_read()
1599 vgpu_vreg(vgpu, offset) = v; in bxt_port_tx_dw3_read()
1601 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in bxt_port_tx_dw3_read()
1604 static int bxt_pcs_dw12_grp_write(struct intel_vgpu *vgpu, in bxt_pcs_dw12_grp_write() argument
1610 vgpu_vreg(vgpu, offset - 0x600) = v; in bxt_pcs_dw12_grp_write()
1611 vgpu_vreg(vgpu, offset - 0x800) = v; in bxt_pcs_dw12_grp_write()
1613 vgpu_vreg(vgpu, offset - 0x400) = v; in bxt_pcs_dw12_grp_write()
1614 vgpu_vreg(vgpu, offset - 0x600) = v; in bxt_pcs_dw12_grp_write()
1617 vgpu_vreg(vgpu, offset) = v; in bxt_pcs_dw12_grp_write()
1622 static int bxt_gt_disp_pwron_write(struct intel_vgpu *vgpu, in bxt_gt_disp_pwron_write() argument
1628 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) &= in bxt_gt_disp_pwron_write()
1630 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY0)) |= in bxt_gt_disp_pwron_write()
1635 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) &= in bxt_gt_disp_pwron_write()
1637 vgpu_vreg_t(vgpu, BXT_PORT_CL1CM_DW0(DPIO_PHY1)) |= in bxt_gt_disp_pwron_write()
1642 vgpu_vreg(vgpu, offset) = v; in bxt_gt_disp_pwron_write()
1647 static int edp_psr_imr_iir_write(struct intel_vgpu *vgpu, in edp_psr_imr_iir_write() argument
1650 vgpu_vreg(vgpu, offset) = 0; in edp_psr_imr_iir_write()
1664 static int bxt_ppat_low_write(struct intel_vgpu *vgpu, unsigned int offset, in bxt_ppat_low_write() argument
1677 vgpu_vreg(vgpu, offset) = lower_32_bits(pat); in bxt_ppat_low_write()
1682 static int guc_status_read(struct intel_vgpu *vgpu, in guc_status_read() argument
1687 read_vreg(vgpu, offset, p_data, bytes); in guc_status_read()
1688 vgpu_vreg(vgpu, offset) &= ~GS_MIA_IN_RESET; in guc_status_read()
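
guc_status_read() is the read-side twin of a write-to-set flag: gdrst_mmio_write() sets GS_MIA_IN_RESET on a GuC reset request, and this read hands the value back and then clears the flag, so the guest observes "in reset" exactly once. A sketch (the bit position below is a placeholder, not the real GS_MIA_IN_RESET value):

#include <stdint.h>
#include <stdio.h>

#define MIA_IN_RESET (1u << 0)  /* placeholder for GS_MIA_IN_RESET */

static uint32_t guc_status = MIA_IN_RESET;      /* set by the reset handler */

/* Mirrors guc_status_read(): return the current value, then clear the
 * flag so "in reset" is seen only on the first poll. */
static uint32_t model_guc_status_read(void)
{
        uint32_t v = guc_status;

        guc_status &= ~MIA_IN_RESET;
        return v;
}

int main(void)
{
        printf("first:  0x%08x\n", model_guc_status_read());    /* flag set */
        printf("second: 0x%08x\n", model_guc_status_read());    /* flag clear */
        return 0;
}
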
1692 static int mmio_read_from_hw(struct intel_vgpu *vgpu, in mmio_read_from_hw() argument
1695 struct intel_gvt *gvt = vgpu->gvt; in mmio_read_from_hw()
1707 vgpu == gvt->scheduler.engine_owner[engine->id] || in mmio_read_from_hw()
1711 vgpu_vreg(vgpu, offset) = in mmio_read_from_hw()
1716 return intel_vgpu_default_mmio_read(vgpu, offset, p_data, bytes); in mmio_read_from_hw()
1719 static int elsp_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in elsp_mmio_write() argument
1722 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in elsp_mmio_write()
1723 const struct intel_engine_cs *engine = intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in elsp_mmio_write()
1743 if (vgpu->d3_entered) in elsp_mmio_write()
1744 vgpu->d3_entered = false; in elsp_mmio_write()
1746 execlist = &vgpu->submission.execlist[engine->id]; in elsp_mmio_write()
1750 ret = intel_vgpu_submit_execlist(vgpu, engine); in elsp_mmio_write()
1761 static int ring_mode_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in ring_mode_mmio_write() argument
1766 intel_gvt_render_mmio_to_engine(vgpu->gvt, offset); in ring_mode_mmio_write()
1771 if (IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
1772 IS_COMETLAKE(vgpu->gvt->gt->i915)) in ring_mode_mmio_write()
1774 write_vreg(vgpu, offset, p_data, bytes); in ring_mode_mmio_write()
1777 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
1781 if ((IS_COFFEELAKE(vgpu->gvt->gt->i915) || in ring_mode_mmio_write()
1782 IS_COMETLAKE(vgpu->gvt->gt->i915)) && in ring_mode_mmio_write()
1784 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
1794 !vgpu->pv_notified) { in ring_mode_mmio_write()
1795 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in ring_mode_mmio_write()
1809 ret = intel_vgpu_select_submission_ops(vgpu, in ring_mode_mmio_write()
1815 intel_vgpu_start_schedule(vgpu); in ring_mode_mmio_write()
1820 static int gvt_reg_tlb_control_handler(struct intel_vgpu *vgpu, in gvt_reg_tlb_control_handler() argument
1825 write_vreg(vgpu, offset, p_data, bytes); in gvt_reg_tlb_control_handler()
1826 vgpu_vreg(vgpu, offset) = 0; in gvt_reg_tlb_control_handler()
1847 set_bit(id, (void *)vgpu->submission.tlb_handle_pending); in gvt_reg_tlb_control_handler()
1852 static int ring_reset_ctl_write(struct intel_vgpu *vgpu, in ring_reset_ctl_write() argument
1857 write_vreg(vgpu, offset, p_data, bytes); in ring_reset_ctl_write()
1858 data = vgpu_vreg(vgpu, offset); in ring_reset_ctl_write()
1865 vgpu_vreg(vgpu, offset) = data; in ring_reset_ctl_write()
1869 static int csfe_chicken1_mmio_write(struct intel_vgpu *vgpu, in csfe_chicken1_mmio_write() argument
1876 write_vreg(vgpu, offset, p_data, bytes); in csfe_chicken1_mmio_write()
1880 enter_failsafe_mode(vgpu, GVT_FAILSAFE_UNSUPPORTED_GUEST); in csfe_chicken1_mmio_write()
3540 int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_default_mmio_read() argument
3543 read_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_default_mmio_read()
3557 int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_default_mmio_write() argument
3560 write_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_default_mmio_write()
3574 int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_mask_mmio_write() argument
3579 old_vreg = vgpu_vreg(vgpu, offset); in intel_vgpu_mask_mmio_write()
3580 write_vreg(vgpu, offset, p_data, bytes); in intel_vgpu_mask_mmio_write()
3581 mask = vgpu_vreg(vgpu, offset) >> 16; in intel_vgpu_mask_mmio_write()
3582 vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) | in intel_vgpu_mask_mmio_write()
3583 (vgpu_vreg(vgpu, offset) & mask); in intel_vgpu_mask_mmio_write()
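
intel_vgpu_mask_mmio_write() implements the masked-register convention used by many GEN MMIO registers: bits 31:16 of the written value select which of bits 15:0 actually change, so a guest can update individual bits without a read-modify-write cycle. Extracted as a pure function:

#include <stdint.h>
#include <assert.h>

/* Mirrors intel_vgpu_mask_mmio_write(): bits 31:16 of the written value
 * enable updates to the corresponding bits 15:0; unselected bits keep
 * their old contents. */
static uint32_t masked_write(uint32_t old, uint32_t written)
{
        uint32_t mask = written >> 16;

        return (old & ~mask) | (written & mask);
}

int main(void)
{
        /* Set bit 0 without disturbing bit 1: mask = 0x0001, bit0 = 1. */
        uint32_t v = masked_write(0x2, 0x00010001);

        assert(v == 0x3);

        /* Clear bit 0, again leaving bit 1 alone: mask = 0x0001, bit0 = 0. */
        v = masked_write(v, 0x00010000);
        assert(v == 0x2);
        return 0;
}
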
3616 int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset, in intel_vgpu_mmio_reg_rw() argument
3619 struct drm_i915_private *i915 = vgpu->gvt->gt->i915; in intel_vgpu_mmio_reg_rw()
3620 struct intel_gvt *gvt = vgpu->gvt; in intel_vgpu_mmio_reg_rw()
3636 return func(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3650 return mmio_info->read(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3657 old_vreg = vgpu_vreg(vgpu, offset); in intel_vgpu_mmio_reg_rw()
3661 ret = mmio_info->write(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
3669 data |= vgpu_vreg(vgpu, offset) & ro_mask; in intel_vgpu_mmio_reg_rw()
3670 ret = mmio_info->write(vgpu, offset, &data, bytes); in intel_vgpu_mmio_reg_rw()
3675 u32 mask = vgpu_vreg(vgpu, offset) >> 16; in intel_vgpu_mmio_reg_rw()
3677 vgpu_vreg(vgpu, offset) = (old_vreg & ~mask) in intel_vgpu_mmio_reg_rw()
3678 | (vgpu_vreg(vgpu, offset) & mask); in intel_vgpu_mmio_reg_rw()
3686 intel_vgpu_default_mmio_read(vgpu, offset, pdata, bytes) : in intel_vgpu_mmio_reg_rw()
3687 intel_vgpu_default_mmio_write(vgpu, offset, pdata, bytes); in intel_vgpu_mmio_reg_rw()
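
intel_vgpu_mmio_reg_rw() is the central dispatcher: it looks up the tracked handler for the offset, and for writes to registers with a read-only mask it splices the guest's value with the vreg's protected bits before calling the handler, falling back to the default read/write when no handler is registered. A sketch of the RO splice, consistent with the "data |= vgpu_vreg(vgpu, offset) & ro_mask" fragment above:

#include <stdint.h>
#include <assert.h>

/* Bits covered by ro_mask are taken from the current vreg; everything
 * else comes from the guest's write. */
static uint32_t apply_ro_mask(uint32_t vreg, uint32_t written, uint32_t ro_mask)
{
        return (written & ~ro_mask) | (vreg & ro_mask);
}

int main(void)
{
        /* Guest tries to flip all 32 bits, but bits 31:24 are read-only. */
        uint32_t v = apply_ro_mask(0xab000001, 0xffffffff, 0xff000000);

        assert(v == 0xabffffff);        /* RO byte survives, the rest is written */
        return 0;
}
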