Lines matching refs: rdev (drivers/gpu/drm/radeon/ni.c)
45 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg) in tn_smc_rreg() argument
50 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
53 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
57 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v) in tn_smc_wreg() argument
61 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
64 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
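
The two Trinity SMC accessors above only surface their locking here, because the indexed register writes hide rdev inside the WREG32/RREG32 macros. A minimal sketch of the full read path, assuming the TN_SMC_IND_INDEX_0/TN_SMC_IND_DATA_0 index/data pair used by this accessor:

u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	/* serialize against other users of the shared index/data pair */
	spin_lock_irqsave(&rdev->smc_idx_lock, flags);
	WREG32(TN_SMC_IND_INDEX_0, reg);	/* select the SMC register */
	r = RREG32(TN_SMC_IND_DATA_0);		/* read its value back */
	spin_unlock_irqrestore(&rdev->smc_idx_lock, flags);
	return r;
}
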
193 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
194 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
195 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
196 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
197 extern int evergreen_mc_wait_for_idle(struct radeon_device *rdev);
198 extern void evergreen_mc_program(struct radeon_device *rdev);
199 extern void evergreen_irq_suspend(struct radeon_device *rdev);
200 extern int evergreen_mc_init(struct radeon_device *rdev);
201 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
202 extern void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
203 extern void evergreen_program_aspm(struct radeon_device *rdev);
204 extern void sumo_rlc_fini(struct radeon_device *rdev);
205 extern int sumo_rlc_init(struct radeon_device *rdev);
206 extern void evergreen_gpu_pci_config_reset(struct radeon_device *rdev);
456 static void ni_init_golden_registers(struct radeon_device *rdev) in ni_init_golden_registers() argument
458 switch (rdev->family) { in ni_init_golden_registers()
460 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
463 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
468 if ((rdev->pdev->device == 0x9900) || in ni_init_golden_registers()
469 (rdev->pdev->device == 0x9901) || in ni_init_golden_registers()
470 (rdev->pdev->device == 0x9903) || in ni_init_golden_registers()
471 (rdev->pdev->device == 0x9904) || in ni_init_golden_registers()
472 (rdev->pdev->device == 0x9905) || in ni_init_golden_registers()
473 (rdev->pdev->device == 0x9906) || in ni_init_golden_registers()
474 (rdev->pdev->device == 0x9907) || in ni_init_golden_registers()
475 (rdev->pdev->device == 0x9908) || in ni_init_golden_registers()
476 (rdev->pdev->device == 0x9909) || in ni_init_golden_registers()
477 (rdev->pdev->device == 0x990A) || in ni_init_golden_registers()
478 (rdev->pdev->device == 0x990B) || in ni_init_golden_registers()
479 (rdev->pdev->device == 0x990C) || in ni_init_golden_registers()
480 (rdev->pdev->device == 0x990D) || in ni_init_golden_registers()
481 (rdev->pdev->device == 0x990E) || in ni_init_golden_registers()
482 (rdev->pdev->device == 0x990F) || in ni_init_golden_registers()
483 (rdev->pdev->device == 0x9910) || in ni_init_golden_registers()
484 (rdev->pdev->device == 0x9913) || in ni_init_golden_registers()
485 (rdev->pdev->device == 0x9917) || in ni_init_golden_registers()
486 (rdev->pdev->device == 0x9918)) { in ni_init_golden_registers()
487 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
490 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
494 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
497 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
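
Each radeon_program_register_sequence() call above walks a per-chip "golden register" table. As far as the helper in radeon_device.c is concerned, these tables are (offset, and_mask, or_value) triplets, with an and_mask of 0xffffffff meaning "write the value outright". A sketch of the call shape, with an illustrative table name and values:

static const u32 example_golden_registers[] = {
	/* offset,  and_mask,   or_value */
	0x9a10,     0x00010000, 0x00018208,	/* read-modify-write */
	0x9830,     0xffffffff, 0x00000000,	/* plain write       */
};

radeon_program_register_sequence(rdev,
				 example_golden_registers,
				 (const u32)ARRAY_SIZE(example_golden_registers));
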
637 int ni_mc_load_microcode(struct radeon_device *rdev) in ni_mc_load_microcode() argument
644 if (!rdev->mc_fw) in ni_mc_load_microcode()
647 switch (rdev->family) { in ni_mc_load_microcode()
690 fw_data = (const __be32 *)rdev->mc_fw->data; in ni_mc_load_microcode()
700 for (i = 0; i < rdev->usec_timeout; i++) { in ni_mc_load_microcode()
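
Between the matches at 690 and 700, ni_mc_load_microcode() streams the big-endian MC firmware into the memory-controller sequencer and then polls for training to finish. A rough sketch of that elided middle, assuming the MC_SEQ_SUP_PGM data port and the MC_IO_PAD_CNTL_D0/MEM_FALL_OUT_CMD completion bit used by the Evergreen-family loaders:

	/* load the MC ucode words (firmware is stored big-endian) */
	fw_data = (const __be32 *)rdev->mc_fw->data;
	for (i = 0; i < ucode_size; i++)
		WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));

	/* wait for memory training to complete, bounded by rdev->usec_timeout */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(MC_IO_PAD_CNTL_D0) & MEM_FALL_OUT_CMD)
			break;
		udelay(1);
	}
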
713 int ni_init_microcode(struct radeon_device *rdev) in ni_init_microcode() argument
724 switch (rdev->family) { in ni_init_microcode()
776 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev); in ni_init_microcode()
779 if (rdev->pfp_fw->size != pfp_req_size) { in ni_init_microcode()
781 rdev->pfp_fw->size, fw_name); in ni_init_microcode()
787 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev); in ni_init_microcode()
790 if (rdev->me_fw->size != me_req_size) { in ni_init_microcode()
792 rdev->me_fw->size, fw_name); in ni_init_microcode()
797 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev); in ni_init_microcode()
800 if (rdev->rlc_fw->size != rlc_req_size) { in ni_init_microcode()
802 rdev->rlc_fw->size, fw_name); in ni_init_microcode()
807 if (!(rdev->flags & RADEON_IS_IGP)) { in ni_init_microcode()
809 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev); in ni_init_microcode()
812 if (rdev->mc_fw->size != mc_req_size) { in ni_init_microcode()
814 rdev->mc_fw->size, fw_name); in ni_init_microcode()
819 if ((rdev->family >= CHIP_BARTS) && (rdev->family <= CHIP_CAYMAN)) { in ni_init_microcode()
821 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev); in ni_init_microcode()
824 release_firmware(rdev->smc_fw); in ni_init_microcode()
825 rdev->smc_fw = NULL; in ni_init_microcode()
827 } else if (rdev->smc_fw->size != smc_req_size) { in ni_init_microcode()
829 rdev->smc_fw->size, fw_name); in ni_init_microcode()
839 release_firmware(rdev->pfp_fw); in ni_init_microcode()
840 rdev->pfp_fw = NULL; in ni_init_microcode()
841 release_firmware(rdev->me_fw); in ni_init_microcode()
842 rdev->me_fw = NULL; in ni_init_microcode()
843 release_firmware(rdev->rlc_fw); in ni_init_microcode()
844 rdev->rlc_fw = NULL; in ni_init_microcode()
845 release_firmware(rdev->mc_fw); in ni_init_microcode()
846 rdev->mc_fw = NULL; in ni_init_microcode()
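
ni_init_microcode() repeats the same fetch-and-validate step for each blob (pfp, me, rlc, mc, smc), and any failure falls through to the release_firmware() cleanup shown at 839-846. A sketch of one iteration of that pattern, with chip_name and pfp_req_size standing in for the per-family values chosen in the switch at 724:

	snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
	err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
	if (err)
		goto out;	/* common error path frees whatever was already requested */
	if (rdev->pfp_fw->size != pfp_req_size) {
		pr_err("ni_cp: Bogus length %zu in firmware \"%s\"\n",
		       rdev->pfp_fw->size, fw_name);
		err = -EINVAL;
		goto out;
	}
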
861 int cayman_get_allowed_info_register(struct radeon_device *rdev, in cayman_get_allowed_info_register() argument
880 int tn_get_temp(struct radeon_device *rdev) in tn_get_temp() argument
891 static void cayman_gpu_init(struct radeon_device *rdev) in cayman_gpu_init() argument
904 switch (rdev->family) { in cayman_gpu_init()
906 rdev->config.cayman.max_shader_engines = 2; in cayman_gpu_init()
907 rdev->config.cayman.max_pipes_per_simd = 4; in cayman_gpu_init()
908 rdev->config.cayman.max_tile_pipes = 8; in cayman_gpu_init()
909 rdev->config.cayman.max_simds_per_se = 12; in cayman_gpu_init()
910 rdev->config.cayman.max_backends_per_se = 4; in cayman_gpu_init()
911 rdev->config.cayman.max_texture_channel_caches = 8; in cayman_gpu_init()
912 rdev->config.cayman.max_gprs = 256; in cayman_gpu_init()
913 rdev->config.cayman.max_threads = 256; in cayman_gpu_init()
914 rdev->config.cayman.max_gs_threads = 32; in cayman_gpu_init()
915 rdev->config.cayman.max_stack_entries = 512; in cayman_gpu_init()
916 rdev->config.cayman.sx_num_of_sets = 8; in cayman_gpu_init()
917 rdev->config.cayman.sx_max_export_size = 256; in cayman_gpu_init()
918 rdev->config.cayman.sx_max_export_pos_size = 64; in cayman_gpu_init()
919 rdev->config.cayman.sx_max_export_smx_size = 192; in cayman_gpu_init()
920 rdev->config.cayman.max_hw_contexts = 8; in cayman_gpu_init()
921 rdev->config.cayman.sq_num_cf_insts = 2; in cayman_gpu_init()
923 rdev->config.cayman.sc_prim_fifo_size = 0x100; in cayman_gpu_init()
924 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30; in cayman_gpu_init()
925 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130; in cayman_gpu_init()
930 rdev->config.cayman.max_shader_engines = 1; in cayman_gpu_init()
931 rdev->config.cayman.max_pipes_per_simd = 4; in cayman_gpu_init()
932 rdev->config.cayman.max_tile_pipes = 2; in cayman_gpu_init()
933 if ((rdev->pdev->device == 0x9900) || in cayman_gpu_init()
934 (rdev->pdev->device == 0x9901) || in cayman_gpu_init()
935 (rdev->pdev->device == 0x9905) || in cayman_gpu_init()
936 (rdev->pdev->device == 0x9906) || in cayman_gpu_init()
937 (rdev->pdev->device == 0x9907) || in cayman_gpu_init()
938 (rdev->pdev->device == 0x9908) || in cayman_gpu_init()
939 (rdev->pdev->device == 0x9909) || in cayman_gpu_init()
940 (rdev->pdev->device == 0x990B) || in cayman_gpu_init()
941 (rdev->pdev->device == 0x990C) || in cayman_gpu_init()
942 (rdev->pdev->device == 0x990F) || in cayman_gpu_init()
943 (rdev->pdev->device == 0x9910) || in cayman_gpu_init()
944 (rdev->pdev->device == 0x9917) || in cayman_gpu_init()
945 (rdev->pdev->device == 0x9999) || in cayman_gpu_init()
946 (rdev->pdev->device == 0x999C)) { in cayman_gpu_init()
947 rdev->config.cayman.max_simds_per_se = 6; in cayman_gpu_init()
948 rdev->config.cayman.max_backends_per_se = 2; in cayman_gpu_init()
949 rdev->config.cayman.max_hw_contexts = 8; in cayman_gpu_init()
950 rdev->config.cayman.sx_max_export_size = 256; in cayman_gpu_init()
951 rdev->config.cayman.sx_max_export_pos_size = 64; in cayman_gpu_init()
952 rdev->config.cayman.sx_max_export_smx_size = 192; in cayman_gpu_init()
953 } else if ((rdev->pdev->device == 0x9903) || in cayman_gpu_init()
954 (rdev->pdev->device == 0x9904) || in cayman_gpu_init()
955 (rdev->pdev->device == 0x990A) || in cayman_gpu_init()
956 (rdev->pdev->device == 0x990D) || in cayman_gpu_init()
957 (rdev->pdev->device == 0x990E) || in cayman_gpu_init()
958 (rdev->pdev->device == 0x9913) || in cayman_gpu_init()
959 (rdev->pdev->device == 0x9918) || in cayman_gpu_init()
960 (rdev->pdev->device == 0x999D)) { in cayman_gpu_init()
961 rdev->config.cayman.max_simds_per_se = 4; in cayman_gpu_init()
962 rdev->config.cayman.max_backends_per_se = 2; in cayman_gpu_init()
963 rdev->config.cayman.max_hw_contexts = 8; in cayman_gpu_init()
964 rdev->config.cayman.sx_max_export_size = 256; in cayman_gpu_init()
965 rdev->config.cayman.sx_max_export_pos_size = 64; in cayman_gpu_init()
966 rdev->config.cayman.sx_max_export_smx_size = 192; in cayman_gpu_init()
967 } else if ((rdev->pdev->device == 0x9919) || in cayman_gpu_init()
968 (rdev->pdev->device == 0x9990) || in cayman_gpu_init()
969 (rdev->pdev->device == 0x9991) || in cayman_gpu_init()
970 (rdev->pdev->device == 0x9994) || in cayman_gpu_init()
971 (rdev->pdev->device == 0x9995) || in cayman_gpu_init()
972 (rdev->pdev->device == 0x9996) || in cayman_gpu_init()
973 (rdev->pdev->device == 0x999A) || in cayman_gpu_init()
974 (rdev->pdev->device == 0x99A0)) { in cayman_gpu_init()
975 rdev->config.cayman.max_simds_per_se = 3; in cayman_gpu_init()
976 rdev->config.cayman.max_backends_per_se = 1; in cayman_gpu_init()
977 rdev->config.cayman.max_hw_contexts = 4; in cayman_gpu_init()
978 rdev->config.cayman.sx_max_export_size = 128; in cayman_gpu_init()
979 rdev->config.cayman.sx_max_export_pos_size = 32; in cayman_gpu_init()
980 rdev->config.cayman.sx_max_export_smx_size = 96; in cayman_gpu_init()
982 rdev->config.cayman.max_simds_per_se = 2; in cayman_gpu_init()
983 rdev->config.cayman.max_backends_per_se = 1; in cayman_gpu_init()
984 rdev->config.cayman.max_hw_contexts = 4; in cayman_gpu_init()
985 rdev->config.cayman.sx_max_export_size = 128; in cayman_gpu_init()
986 rdev->config.cayman.sx_max_export_pos_size = 32; in cayman_gpu_init()
987 rdev->config.cayman.sx_max_export_smx_size = 96; in cayman_gpu_init()
989 rdev->config.cayman.max_texture_channel_caches = 2; in cayman_gpu_init()
990 rdev->config.cayman.max_gprs = 256; in cayman_gpu_init()
991 rdev->config.cayman.max_threads = 256; in cayman_gpu_init()
992 rdev->config.cayman.max_gs_threads = 32; in cayman_gpu_init()
993 rdev->config.cayman.max_stack_entries = 512; in cayman_gpu_init()
994 rdev->config.cayman.sx_num_of_sets = 8; in cayman_gpu_init()
995 rdev->config.cayman.sq_num_cf_insts = 2; in cayman_gpu_init()
997 rdev->config.cayman.sc_prim_fifo_size = 0x40; in cayman_gpu_init()
998 rdev->config.cayman.sc_hiz_tile_fifo_size = 0x30; in cayman_gpu_init()
999 rdev->config.cayman.sc_earlyz_tile_fifo_size = 0x130; in cayman_gpu_init()
1017 evergreen_fix_pci_max_read_req_size(rdev); in cayman_gpu_init()
1023 rdev->config.cayman.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024; in cayman_gpu_init()
1024 if (rdev->config.cayman.mem_row_size_in_kb > 4) in cayman_gpu_init()
1025 rdev->config.cayman.mem_row_size_in_kb = 4; in cayman_gpu_init()
1027 rdev->config.cayman.shader_engine_tile_size = 32; in cayman_gpu_init()
1028 rdev->config.cayman.num_gpus = 1; in cayman_gpu_init()
1029 rdev->config.cayman.multi_gpu_tile_size = 64; in cayman_gpu_init()
1032 rdev->config.cayman.num_tile_pipes = (1 << tmp); in cayman_gpu_init()
1034 rdev->config.cayman.mem_max_burst_length_bytes = (tmp + 1) * 256; in cayman_gpu_init()
1036 rdev->config.cayman.num_shader_engines = tmp + 1; in cayman_gpu_init()
1038 rdev->config.cayman.num_gpus = tmp + 1; in cayman_gpu_init()
1040 rdev->config.cayman.multi_gpu_tile_size = 1 << tmp; in cayman_gpu_init()
1042 rdev->config.cayman.mem_row_size_in_kb = 1 << tmp; in cayman_gpu_init()
1052 rdev->config.cayman.tile_config = 0; in cayman_gpu_init()
1053 switch (rdev->config.cayman.num_tile_pipes) { in cayman_gpu_init()
1056 rdev->config.cayman.tile_config |= (0 << 0); in cayman_gpu_init()
1059 rdev->config.cayman.tile_config |= (1 << 0); in cayman_gpu_init()
1062 rdev->config.cayman.tile_config |= (2 << 0); in cayman_gpu_init()
1065 rdev->config.cayman.tile_config |= (3 << 0); in cayman_gpu_init()
1070 if (rdev->flags & RADEON_IS_IGP) in cayman_gpu_init()
1071 rdev->config.cayman.tile_config |= 1 << 4; in cayman_gpu_init()
1075 rdev->config.cayman.tile_config |= 0 << 4; in cayman_gpu_init()
1078 rdev->config.cayman.tile_config |= 1 << 4; in cayman_gpu_init()
1082 rdev->config.cayman.tile_config |= 2 << 4; in cayman_gpu_init()
1086 rdev->config.cayman.tile_config |= in cayman_gpu_init()
1088 rdev->config.cayman.tile_config |= in cayman_gpu_init()
1092 for (i = (rdev->config.cayman.max_shader_engines - 1); i >= 0; i--) { in cayman_gpu_init()
1104 …for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines)… in cayman_gpu_init()
1108 …for (i = 0; i < (rdev->config.cayman.max_backends_per_se * rdev->config.cayman.max_shader_engines)… in cayman_gpu_init()
1112 for (i = 0; i < rdev->config.cayman.max_shader_engines; i++) { in cayman_gpu_init()
1118 simd_disable_bitmap |= 0xffffffff << rdev->config.cayman.max_simds_per_se; in cayman_gpu_init()
1122 rdev->config.cayman.active_simds = hweight32(~tmp); in cayman_gpu_init()
1129 if (ASIC_IS_DCE6(rdev)) in cayman_gpu_init()
1138 if ((rdev->config.cayman.max_backends_per_se == 1) && in cayman_gpu_init()
1139 (rdev->flags & RADEON_IS_IGP)) { in cayman_gpu_init()
1149 tmp = r6xx_remap_render_backend(rdev, tmp, in cayman_gpu_init()
1150 rdev->config.cayman.max_backends_per_se * in cayman_gpu_init()
1151 rdev->config.cayman.max_shader_engines, in cayman_gpu_init()
1154 rdev->config.cayman.backend_map = tmp; in cayman_gpu_init()
1158 for (i = 0; i < rdev->config.cayman.max_texture_channel_caches; i++) in cayman_gpu_init()
1180 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.cayman.sx_num_of_sets); in cayman_gpu_init()
1196 …WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.cayman.sx_max_export_size / 4) - 1… in cayman_gpu_init()
1197 POSITION_BUFFER_SIZE((rdev->config.cayman.sx_max_export_pos_size / 4) - 1) | in cayman_gpu_init()
1198 SMX_BUFFER_SIZE((rdev->config.cayman.sx_max_export_smx_size / 4) - 1))); in cayman_gpu_init()
1200 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.cayman.sc_prim_fifo_size) | in cayman_gpu_init()
1201 SC_HIZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_hiz_tile_fifo_size) | in cayman_gpu_init()
1202 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.cayman.sc_earlyz_tile_fifo_size))); in cayman_gpu_init()
1209 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.cayman.sq_num_cf_insts) | in cayman_gpu_init()
1252 if (rdev->family == CHIP_ARUBA) { in cayman_gpu_init()
1265 void cayman_pcie_gart_tlb_flush(struct radeon_device *rdev) in cayman_pcie_gart_tlb_flush() argument
1274 static int cayman_pcie_gart_enable(struct radeon_device *rdev) in cayman_pcie_gart_enable() argument
1278 if (rdev->gart.robj == NULL) { in cayman_pcie_gart_enable()
1279 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n"); in cayman_pcie_gart_enable()
1282 r = radeon_gart_table_vram_pin(rdev); in cayman_pcie_gart_enable()
1305 WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12); in cayman_pcie_gart_enable()
1306 WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12); in cayman_pcie_gart_enable()
1307 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in cayman_pcie_gart_enable()
1309 (u32)(rdev->dummy_page.addr >> 12)); in cayman_pcie_gart_enable()
1326 rdev->vm_manager.max_pfn - 1); in cayman_pcie_gart_enable()
1328 rdev->vm_manager.saved_table_addr[i]); in cayman_pcie_gart_enable()
1333 (u32)(rdev->dummy_page.addr >> 12)); in cayman_pcie_gart_enable()
1350 cayman_pcie_gart_tlb_flush(rdev); in cayman_pcie_gart_enable()
1352 (unsigned)(rdev->mc.gtt_size >> 20), in cayman_pcie_gart_enable()
1353 (unsigned long long)rdev->gart.table_addr); in cayman_pcie_gart_enable()
1354 rdev->gart.ready = true; in cayman_pcie_gart_enable()
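
Read top to bottom, the matches in cayman_pcie_gart_enable() trace the usual bring-up order; the sketch below only annotates that order, using registers already visible above:

	/* 1. the page table must live in pinned VRAM before it is pointed at */
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;

	/* 2. VM context 0 covers the GTT aperture, and faults are backed by the
	 *    dummy page so stray accesses cannot scribble over real memory */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);

	/* 3. only after a TLB flush is the GART declared usable */
	cayman_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
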
1358 static void cayman_pcie_gart_disable(struct radeon_device *rdev) in cayman_pcie_gart_disable() argument
1363 rdev->vm_manager.saved_table_addr[i] = RREG32( in cayman_pcie_gart_disable()
1382 radeon_gart_table_vram_unpin(rdev); in cayman_pcie_gart_disable()
1385 static void cayman_pcie_gart_fini(struct radeon_device *rdev) in cayman_pcie_gart_fini() argument
1387 cayman_pcie_gart_disable(rdev); in cayman_pcie_gart_fini()
1388 radeon_gart_table_vram_free(rdev); in cayman_pcie_gart_fini()
1389 radeon_gart_fini(rdev); in cayman_pcie_gart_fini()
1392 void cayman_cp_int_cntl_setup(struct radeon_device *rdev, in cayman_cp_int_cntl_setup() argument
1402 void cayman_fence_ring_emit(struct radeon_device *rdev, in cayman_fence_ring_emit() argument
1405 struct radeon_ring *ring = &rdev->ring[fence->ring]; in cayman_fence_ring_emit()
1406 u64 addr = rdev->fence_drv[fence->ring].gpu_addr; in cayman_fence_ring_emit()
1425 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) in cayman_ring_ib_execute() argument
1427 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_ring_ib_execute()
1461 static void cayman_cp_enable(struct radeon_device *rdev, bool enable) in cayman_cp_enable() argument
1466 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in cayman_cp_enable()
1467 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size); in cayman_cp_enable()
1470 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in cayman_cp_enable()
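
cayman_cp_enable() is short; the listing only shows its disable half because the enable half is a bare register write. A sketch of the whole helper, assuming the CP_ME_CNTL halt bits (CP_ME_HALT | CP_PFP_HALT) carried over from the r600/evergreen CP:

static void cayman_cp_enable(struct radeon_device *rdev, bool enable)
{
	if (enable)
		WREG32(CP_ME_CNTL, 0);	/* release ME/PFP from halt */
	else {
		if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
			radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
		WREG32(CP_ME_CNTL, (CP_ME_HALT | CP_PFP_HALT));
		rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
	}
}
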
1474 u32 cayman_gfx_get_rptr(struct radeon_device *rdev, in cayman_gfx_get_rptr() argument
1479 if (rdev->wb.enabled) in cayman_gfx_get_rptr()
1480 rptr = rdev->wb.wb[ring->rptr_offs/4]; in cayman_gfx_get_rptr()
1493 u32 cayman_gfx_get_wptr(struct radeon_device *rdev, in cayman_gfx_get_wptr() argument
1508 void cayman_gfx_set_wptr(struct radeon_device *rdev, in cayman_gfx_set_wptr() argument
1523 static int cayman_cp_load_microcode(struct radeon_device *rdev) in cayman_cp_load_microcode() argument
1528 if (!rdev->me_fw || !rdev->pfp_fw) in cayman_cp_load_microcode()
1531 cayman_cp_enable(rdev, false); in cayman_cp_load_microcode()
1533 fw_data = (const __be32 *)rdev->pfp_fw->data; in cayman_cp_load_microcode()
1539 fw_data = (const __be32 *)rdev->me_fw->data; in cayman_cp_load_microcode()
1550 static int cayman_cp_start(struct radeon_device *rdev) in cayman_cp_start() argument
1552 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_cp_start()
1555 r = radeon_ring_lock(rdev, ring, 7); in cayman_cp_start()
1563 radeon_ring_write(ring, rdev->config.cayman.max_hw_contexts - 1); in cayman_cp_start()
1567 radeon_ring_unlock_commit(rdev, ring, false); in cayman_cp_start()
1569 cayman_cp_enable(rdev, true); in cayman_cp_start()
1571 r = radeon_ring_lock(rdev, ring, cayman_default_size + 19); in cayman_cp_start()
1609 radeon_ring_unlock_commit(rdev, ring, false); in cayman_cp_start()
1616 static void cayman_cp_fini(struct radeon_device *rdev) in cayman_cp_fini() argument
1618 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_cp_fini()
1619 cayman_cp_enable(rdev, false); in cayman_cp_fini()
1620 radeon_ring_fini(rdev, ring); in cayman_cp_fini()
1621 radeon_scratch_free(rdev, ring->rptr_save_reg); in cayman_cp_fini()
1624 static int cayman_cp_resume(struct radeon_device *rdev) in cayman_cp_resume() argument
1685 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF); in cayman_cp_resume()
1693 ring = &rdev->ring[ridx[i]]; in cayman_cp_resume()
1702 addr = rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET; in cayman_cp_resume()
1709 ring = &rdev->ring[ridx[i]]; in cayman_cp_resume()
1715 ring = &rdev->ring[ridx[i]]; in cayman_cp_resume()
1727 cayman_cp_start(rdev); in cayman_cp_resume()
1728 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true; in cayman_cp_resume()
1729 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in cayman_cp_resume()
1730 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in cayman_cp_resume()
1732 r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]); in cayman_cp_resume()
1734 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false; in cayman_cp_resume()
1735 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false; in cayman_cp_resume()
1736 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false; in cayman_cp_resume()
1740 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX) in cayman_cp_resume()
1741 radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size); in cayman_cp_resume()
1746 u32 cayman_gpu_check_soft_reset(struct radeon_device *rdev) in cayman_gpu_check_soft_reset() argument
1807 if (evergreen_is_display_hung(rdev)) in cayman_gpu_check_soft_reset()
1824 static void cayman_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask) in cayman_gpu_soft_reset() argument
1833 dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask); in cayman_gpu_soft_reset()
1835 evergreen_print_gpu_status_regs(rdev); in cayman_gpu_soft_reset()
1836 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_ADDR 0x%08X\n", in cayman_gpu_soft_reset()
1838 dev_info(rdev->dev, " VM_CONTEXT0_PROTECTION_FAULT_STATUS 0x%08X\n", in cayman_gpu_soft_reset()
1840 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n", in cayman_gpu_soft_reset()
1842 dev_info(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n", in cayman_gpu_soft_reset()
1864 evergreen_mc_stop(rdev, &save); in cayman_gpu_soft_reset()
1865 if (evergreen_mc_wait_for_idle(rdev)) { in cayman_gpu_soft_reset()
1866 dev_warn(rdev->dev, "Wait for MC idle timedout !\n"); in cayman_gpu_soft_reset()
1914 if (!(rdev->flags & RADEON_IS_IGP)) { in cayman_gpu_soft_reset()
1922 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp); in cayman_gpu_soft_reset()
1936 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp); in cayman_gpu_soft_reset()
1950 evergreen_mc_resume(rdev, &save); in cayman_gpu_soft_reset()
1953 evergreen_print_gpu_status_regs(rdev); in cayman_gpu_soft_reset()
1956 int cayman_asic_reset(struct radeon_device *rdev, bool hard) in cayman_asic_reset() argument
1961 evergreen_gpu_pci_config_reset(rdev); in cayman_asic_reset()
1965 reset_mask = cayman_gpu_check_soft_reset(rdev); in cayman_asic_reset()
1968 r600_set_bios_scratch_engine_hung(rdev, true); in cayman_asic_reset()
1970 cayman_gpu_soft_reset(rdev, reset_mask); in cayman_asic_reset()
1972 reset_mask = cayman_gpu_check_soft_reset(rdev); in cayman_asic_reset()
1975 evergreen_gpu_pci_config_reset(rdev); in cayman_asic_reset()
1977 r600_set_bios_scratch_engine_hung(rdev, false); in cayman_asic_reset()
1991 bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) in cayman_gfx_is_lockup() argument
1993 u32 reset_mask = cayman_gpu_check_soft_reset(rdev); in cayman_gfx_is_lockup()
1998 radeon_ring_lockup_update(rdev, ring); in cayman_gfx_is_lockup()
2001 return radeon_ring_test_lockup(rdev, ring); in cayman_gfx_is_lockup()
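
The lockup check at 1991-2001 elides its test: a ring is only handed to the lockup detector when the soft-reset mask reports the graphics blocks as busy. A sketch under that reading, assuming the usual RADEON_RESET_GFX/COMPUTE/CP mask bits:

bool cayman_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
{
	u32 reset_mask = cayman_gpu_check_soft_reset(rdev);

	if (!(reset_mask & (RADEON_RESET_GFX |
			    RADEON_RESET_COMPUTE |
			    RADEON_RESET_CP))) {
		/* engines look idle: refresh the lockup tracker, report healthy */
		radeon_ring_lockup_update(rdev, ring);
		return false;
	}
	/* something is stuck: let the ring-level test decide if it is this ring */
	return radeon_ring_test_lockup(rdev, ring);
}
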
2004 static void cayman_uvd_init(struct radeon_device *rdev) in cayman_uvd_init() argument
2008 if (!rdev->has_uvd) in cayman_uvd_init()
2011 r = radeon_uvd_init(rdev); in cayman_uvd_init()
2013 dev_err(rdev->dev, "failed UVD (%d) init.\n", r); in cayman_uvd_init()
2020 rdev->has_uvd = false; in cayman_uvd_init()
2023 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL; in cayman_uvd_init()
2024 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096); in cayman_uvd_init()
2027 static void cayman_uvd_start(struct radeon_device *rdev) in cayman_uvd_start() argument
2031 if (!rdev->has_uvd) in cayman_uvd_start()
2034 r = uvd_v2_2_resume(rdev); in cayman_uvd_start()
2036 dev_err(rdev->dev, "failed UVD resume (%d).\n", r); in cayman_uvd_start()
2039 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX); in cayman_uvd_start()
2041 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r); in cayman_uvd_start()
2047 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0; in cayman_uvd_start()
2050 static void cayman_uvd_resume(struct radeon_device *rdev) in cayman_uvd_resume() argument
2055 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size) in cayman_uvd_resume()
2058 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; in cayman_uvd_resume()
2059 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0)); in cayman_uvd_resume()
2061 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r); in cayman_uvd_resume()
2064 r = uvd_v1_0_init(rdev); in cayman_uvd_resume()
2066 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r); in cayman_uvd_resume()
2071 static void cayman_vce_init(struct radeon_device *rdev) in cayman_vce_init() argument
2076 if (!rdev->has_vce) in cayman_vce_init()
2079 r = radeon_vce_init(rdev); in cayman_vce_init()
2081 dev_err(rdev->dev, "failed VCE (%d) init.\n", r); in cayman_vce_init()
2088 rdev->has_vce = false; in cayman_vce_init()
2091 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_obj = NULL; in cayman_vce_init()
2092 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE1_INDEX], 4096); in cayman_vce_init()
2093 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_obj = NULL; in cayman_vce_init()
2094 r600_ring_init(rdev, &rdev->ring[TN_RING_TYPE_VCE2_INDEX], 4096); in cayman_vce_init()
2097 static void cayman_vce_start(struct radeon_device *rdev) in cayman_vce_start() argument
2101 if (!rdev->has_vce) in cayman_vce_start()
2104 r = radeon_vce_resume(rdev); in cayman_vce_start()
2106 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in cayman_vce_start()
2109 r = vce_v1_0_resume(rdev); in cayman_vce_start()
2111 dev_err(rdev->dev, "failed VCE resume (%d).\n", r); in cayman_vce_start()
2114 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE1_INDEX); in cayman_vce_start()
2116 dev_err(rdev->dev, "failed initializing VCE1 fences (%d).\n", r); in cayman_vce_start()
2119 r = radeon_fence_driver_start_ring(rdev, TN_RING_TYPE_VCE2_INDEX); in cayman_vce_start()
2121 dev_err(rdev->dev, "failed initializing VCE2 fences (%d).\n", r); in cayman_vce_start()
2127 rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size = 0; in cayman_vce_start()
2128 rdev->ring[TN_RING_TYPE_VCE2_INDEX].ring_size = 0; in cayman_vce_start()
2131 static void cayman_vce_resume(struct radeon_device *rdev) in cayman_vce_resume() argument
2136 if (!rdev->has_vce || !rdev->ring[TN_RING_TYPE_VCE1_INDEX].ring_size) in cayman_vce_resume()
2139 ring = &rdev->ring[TN_RING_TYPE_VCE1_INDEX]; in cayman_vce_resume()
2140 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2142 dev_err(rdev->dev, "failed initializing VCE1 ring (%d).\n", r); in cayman_vce_resume()
2145 ring = &rdev->ring[TN_RING_TYPE_VCE2_INDEX]; in cayman_vce_resume()
2146 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, 0x0); in cayman_vce_resume()
2148 dev_err(rdev->dev, "failed initializing VCE2 ring (%d).\n", r); in cayman_vce_resume()
2151 r = vce_v1_0_init(rdev); in cayman_vce_resume()
2153 dev_err(rdev->dev, "failed initializing VCE (%d).\n", r); in cayman_vce_resume()
2158 static int cayman_startup(struct radeon_device *rdev) in cayman_startup() argument
2160 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_startup()
2164 evergreen_pcie_gen2_enable(rdev); in cayman_startup()
2166 evergreen_program_aspm(rdev); in cayman_startup()
2169 r = r600_vram_scratch_init(rdev); in cayman_startup()
2173 evergreen_mc_program(rdev); in cayman_startup()
2175 if (!(rdev->flags & RADEON_IS_IGP) && !rdev->pm.dpm_enabled) { in cayman_startup()
2176 r = ni_mc_load_microcode(rdev); in cayman_startup()
2183 r = cayman_pcie_gart_enable(rdev); in cayman_startup()
2186 cayman_gpu_init(rdev); in cayman_startup()
2189 if (rdev->flags & RADEON_IS_IGP) { in cayman_startup()
2190 rdev->rlc.reg_list = tn_rlc_save_restore_register_list; in cayman_startup()
2191 rdev->rlc.reg_list_size = in cayman_startup()
2193 rdev->rlc.cs_data = cayman_cs_data; in cayman_startup()
2194 r = sumo_rlc_init(rdev); in cayman_startup()
2202 r = radeon_wb_init(rdev); in cayman_startup()
2206 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX); in cayman_startup()
2208 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cayman_startup()
2212 cayman_uvd_start(rdev); in cayman_startup()
2213 cayman_vce_start(rdev); in cayman_startup()
2215 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX); in cayman_startup()
2217 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cayman_startup()
2221 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX); in cayman_startup()
2223 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r); in cayman_startup()
2227 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX); in cayman_startup()
2229 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in cayman_startup()
2233 r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX); in cayman_startup()
2235 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r); in cayman_startup()
2240 if (!rdev->irq.installed) { in cayman_startup()
2241 r = radeon_irq_kms_init(rdev); in cayman_startup()
2246 r = r600_irq_init(rdev); in cayman_startup()
2249 radeon_irq_kms_fini(rdev); in cayman_startup()
2252 evergreen_irq_set(rdev); in cayman_startup()
2254 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET, in cayman_startup()
2259 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cayman_startup()
2260 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET, in cayman_startup()
2265 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cayman_startup()
2266 r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET, in cayman_startup()
2271 r = cayman_cp_load_microcode(rdev); in cayman_startup()
2274 r = cayman_cp_resume(rdev); in cayman_startup()
2278 r = cayman_dma_resume(rdev); in cayman_startup()
2282 cayman_uvd_resume(rdev); in cayman_startup()
2283 cayman_vce_resume(rdev); in cayman_startup()
2285 r = radeon_ib_pool_init(rdev); in cayman_startup()
2287 dev_err(rdev->dev, "IB initialization failed (%d).\n", r); in cayman_startup()
2291 r = radeon_vm_manager_init(rdev); in cayman_startup()
2293 dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r); in cayman_startup()
2297 r = radeon_audio_init(rdev); in cayman_startup()
2304 int cayman_resume(struct radeon_device *rdev) in cayman_resume() argument
2313 atom_asic_init(rdev->mode_info.atom_context); in cayman_resume()
2316 ni_init_golden_registers(rdev); in cayman_resume()
2318 if (rdev->pm.pm_method == PM_METHOD_DPM) in cayman_resume()
2319 radeon_pm_resume(rdev); in cayman_resume()
2321 rdev->accel_working = true; in cayman_resume()
2322 r = cayman_startup(rdev); in cayman_resume()
2325 rdev->accel_working = false; in cayman_resume()
2331 int cayman_suspend(struct radeon_device *rdev) in cayman_suspend() argument
2333 radeon_pm_suspend(rdev); in cayman_suspend()
2334 radeon_audio_fini(rdev); in cayman_suspend()
2335 radeon_vm_manager_fini(rdev); in cayman_suspend()
2336 cayman_cp_enable(rdev, false); in cayman_suspend()
2337 cayman_dma_stop(rdev); in cayman_suspend()
2338 if (rdev->has_uvd) { in cayman_suspend()
2339 uvd_v1_0_fini(rdev); in cayman_suspend()
2340 radeon_uvd_suspend(rdev); in cayman_suspend()
2342 evergreen_irq_suspend(rdev); in cayman_suspend()
2343 radeon_wb_disable(rdev); in cayman_suspend()
2344 cayman_pcie_gart_disable(rdev); in cayman_suspend()
2354 int cayman_init(struct radeon_device *rdev) in cayman_init() argument
2356 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]; in cayman_init()
2360 if (!radeon_get_bios(rdev)) { in cayman_init()
2361 if (ASIC_IS_AVIVO(rdev)) in cayman_init()
2365 if (!rdev->is_atom_bios) { in cayman_init()
2366 dev_err(rdev->dev, "Expecting atombios for cayman GPU\n"); in cayman_init()
2369 r = radeon_atombios_init(rdev); in cayman_init()
2374 if (!radeon_card_posted(rdev)) { in cayman_init()
2375 if (!rdev->bios) { in cayman_init()
2376 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n"); in cayman_init()
2380 atom_asic_init(rdev->mode_info.atom_context); in cayman_init()
2383 ni_init_golden_registers(rdev); in cayman_init()
2385 r600_scratch_init(rdev); in cayman_init()
2387 radeon_surface_init(rdev); in cayman_init()
2389 radeon_get_clock_info(rdev->ddev); in cayman_init()
2391 r = radeon_fence_driver_init(rdev); in cayman_init()
2395 r = evergreen_mc_init(rdev); in cayman_init()
2399 r = radeon_bo_init(rdev); in cayman_init()
2403 if (rdev->flags & RADEON_IS_IGP) { in cayman_init()
2404 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) { in cayman_init()
2405 r = ni_init_microcode(rdev); in cayman_init()
2412 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) { in cayman_init()
2413 r = ni_init_microcode(rdev); in cayman_init()
2422 radeon_pm_init(rdev); in cayman_init()
2425 r600_ring_init(rdev, ring, 1024 * 1024); in cayman_init()
2427 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; in cayman_init()
2429 r600_ring_init(rdev, ring, 64 * 1024); in cayman_init()
2431 ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX]; in cayman_init()
2433 r600_ring_init(rdev, ring, 64 * 1024); in cayman_init()
2435 cayman_uvd_init(rdev); in cayman_init()
2436 cayman_vce_init(rdev); in cayman_init()
2438 rdev->ih.ring_obj = NULL; in cayman_init()
2439 r600_ih_ring_init(rdev, 64 * 1024); in cayman_init()
2441 r = r600_pcie_gart_init(rdev); in cayman_init()
2445 rdev->accel_working = true; in cayman_init()
2446 r = cayman_startup(rdev); in cayman_init()
2448 dev_err(rdev->dev, "disabling GPU acceleration\n"); in cayman_init()
2449 cayman_cp_fini(rdev); in cayman_init()
2450 cayman_dma_fini(rdev); in cayman_init()
2451 r600_irq_fini(rdev); in cayman_init()
2452 if (rdev->flags & RADEON_IS_IGP) in cayman_init()
2453 sumo_rlc_fini(rdev); in cayman_init()
2454 radeon_wb_fini(rdev); in cayman_init()
2455 radeon_ib_pool_fini(rdev); in cayman_init()
2456 radeon_vm_manager_fini(rdev); in cayman_init()
2457 radeon_irq_kms_fini(rdev); in cayman_init()
2458 cayman_pcie_gart_fini(rdev); in cayman_init()
2459 rdev->accel_working = false; in cayman_init()
2469 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) { in cayman_init()
2477 void cayman_fini(struct radeon_device *rdev) in cayman_fini() argument
2479 radeon_pm_fini(rdev); in cayman_fini()
2480 cayman_cp_fini(rdev); in cayman_fini()
2481 cayman_dma_fini(rdev); in cayman_fini()
2482 r600_irq_fini(rdev); in cayman_fini()
2483 if (rdev->flags & RADEON_IS_IGP) in cayman_fini()
2484 sumo_rlc_fini(rdev); in cayman_fini()
2485 radeon_wb_fini(rdev); in cayman_fini()
2486 radeon_vm_manager_fini(rdev); in cayman_fini()
2487 radeon_ib_pool_fini(rdev); in cayman_fini()
2488 radeon_irq_kms_fini(rdev); in cayman_fini()
2489 uvd_v1_0_fini(rdev); in cayman_fini()
2490 radeon_uvd_fini(rdev); in cayman_fini()
2491 if (rdev->has_vce) in cayman_fini()
2492 radeon_vce_fini(rdev); in cayman_fini()
2493 cayman_pcie_gart_fini(rdev); in cayman_fini()
2494 r600_vram_scratch_fini(rdev); in cayman_fini()
2495 radeon_gem_fini(rdev); in cayman_fini()
2496 radeon_fence_driver_fini(rdev); in cayman_fini()
2497 radeon_bo_fini(rdev); in cayman_fini()
2498 radeon_atombios_fini(rdev); in cayman_fini()
2499 kfree(rdev->bios); in cayman_fini()
2500 rdev->bios = NULL; in cayman_fini()
2506 int cayman_vm_init(struct radeon_device *rdev) in cayman_vm_init() argument
2509 rdev->vm_manager.nvm = 8; in cayman_vm_init()
2511 if (rdev->flags & RADEON_IS_IGP) { in cayman_vm_init()
2514 rdev->vm_manager.vram_base_offset = tmp; in cayman_vm_init()
2516 rdev->vm_manager.vram_base_offset = 0; in cayman_vm_init()
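
cayman_vm_init() at 2506-2516 only differs between IGP and discrete parts in where VRAM appears to the VM: on fusion parts the frame-buffer offset is read back from the MC, on discrete parts it is zero. A sketch of the IGP branch, assuming the FUS_MC_VM_FB_OFFSET register and a 4 MB (<< 22) granularity:

	if (rdev->flags & RADEON_IS_IGP) {
		u64 tmp = RREG32(FUS_MC_VM_FB_OFFSET);
		tmp <<= 22;	/* register holds the offset in 4 MB units */
		rdev->vm_manager.vram_base_offset = tmp;
	} else
		rdev->vm_manager.vram_base_offset = 0;
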
2520 void cayman_vm_fini(struct radeon_device *rdev) in cayman_vm_fini() argument
2533 void cayman_vm_decode_fault(struct radeon_device *rdev, in cayman_vm_decode_fault() argument
2693 void cayman_vm_flush(struct radeon_device *rdev, struct radeon_ring *ring, in cayman_vm_flush() argument
2722 int tn_set_vce_clocks(struct radeon_device *rdev, u32 evclk, u32 ecclk) in tn_set_vce_clocks() argument
2727 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, in tn_set_vce_clocks()