| /OK3568_Linux_fs/kernel/drivers/gpu/drm/amd/amdgpu/ |
| amdgpu_xgmi.c |
|   220  return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.xgmi.node_id);   in amdgpu_xgmi_show_device_id()
|   330  if (!adev->gmc.xgmi.hive_id)   in amdgpu_get_xgmi_hive()
|   342  if (hive->hive_id == adev->gmc.xgmi.hive_id)   in amdgpu_get_xgmi_hive()
|   367  hive->hive_id = adev->gmc.xgmi.hive_id;   in amdgpu_get_xgmi_hive()
|   433  request_adev->gmc.xgmi.node_id,   in amdgpu_xgmi_set_pstate()
|   434  request_adev->gmc.xgmi.hive_id, ret);   in amdgpu_xgmi_set_pstate()
|   462  adev->gmc.xgmi.node_id,   in amdgpu_xgmi_update_topology()
|   463  adev->gmc.xgmi.hive_id, ret);   in amdgpu_xgmi_update_topology()
|   483  if (top->nodes[i].node_id == peer_adev->gmc.xgmi.node_id)   in amdgpu_xgmi_get_hops_count()
|   497  if (!adev->gmc.xgmi.supported)   in amdgpu_xgmi_add_device()
|   [all …]
|
| gmc_v10_0.c |
|   150  adev->gmc.vm_fault.num_types = 1;   in gmc_v10_0_set_irq_funcs()
|   151  adev->gmc.vm_fault.funcs = &gmc_v10_0_irq_funcs;   in gmc_v10_0_set_irq_funcs()
|   154  adev->gmc.ecc_irq.num_types = 1;   in gmc_v10_0_set_irq_funcs()
|   155  adev->gmc.ecc_irq.funcs = &gmc_v10_0_ecc_funcs;   in gmc_v10_0_set_irq_funcs()
|   205  spin_lock(&adev->gmc.invalidate_lock);   in gmc_v10_0_flush_vm_hub()
|   257  spin_unlock(&adev->gmc.invalidate_lock);   in gmc_v10_0_flush_vm_hub()
|   539  adev->gmc.vram_start;   in gmc_v10_0_get_vm_pde()
|   542  if (!adev->gmc.translate_further)   in gmc_v10_0_get_vm_pde()
|   612  if (adev->gmc.gmc_funcs == NULL)   in gmc_v10_0_set_gmc_funcs()
|   613  adev->gmc.gmc_funcs = &gmc_v10_0_gmc_funcs;   in gmc_v10_0_set_gmc_funcs()
|   [all …]
|
| gmc_v7_0.c |
|   159  err = request_firmware(&adev->gmc.fw, fw_name, adev->dev);   in gmc_v7_0_init_microcode()
|   162  err = amdgpu_ucode_validate(adev->gmc.fw);   in gmc_v7_0_init_microcode()
|   167  release_firmware(adev->gmc.fw);   in gmc_v7_0_init_microcode()
|   168  adev->gmc.fw = NULL;   in gmc_v7_0_init_microcode()
|   189  if (!adev->gmc.fw)   in gmc_v7_0_mc_load_microcode()
|   192  hdr = (const struct mc_firmware_header_v1_0 *)adev->gmc.fw->data;   in gmc_v7_0_mc_load_microcode()
|   195  adev->gmc.fw_version = le32_to_cpu(hdr->header.ucode_version);   in gmc_v7_0_mc_load_microcode()
|   198  (adev->gmc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));   in gmc_v7_0_mc_load_microcode()
|   201  (adev->gmc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));   in gmc_v7_0_mc_load_microcode()
|   291  adev->gmc.vram_start >> 12);   in gmc_v7_0_mc_program()
|   [all …]
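The gmc_v7_0.c hits above (and the near-identical gmc_v8_0.c and gmc_v6_0.c entries below) all follow the same MC microcode pattern: request_firmware() fills adev->gmc.fw, amdgpu_ucode_validate() checks it, and the loader then reads a little-endian mc_firmware_header_v1_0 to pick up the ucode version and the byte offsets of the payload arrays. The following is a minimal standalone sketch of that header-parsing step only; the header layout and field names are simplified assumptions for illustration, not the real amdgpu structures.

/*
 * Standalone sketch (user-space C, not kernel code) of the header-parsing
 * step visible above.  The "firmware" blob starts with a little-endian
 * header giving the ucode version and the byte offset of the payload.
 * The header layout here is a simplified assumption, not the real
 * struct mc_firmware_header_v1_0.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct mc_fw_header {                   /* hypothetical, simplified header */
	uint32_t ucode_version;             /* little-endian in the blob */
	uint32_t ucode_array_offset_bytes;  /* where the payload starts */
};

/* byte-wise little-endian read, endian-safe on any host (like le32_to_cpu) */
static uint32_t le32_read(const uint8_t *p)
{
	return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
	       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

int main(void)
{
	/* fake 16-byte blob: 8-byte header followed by an 8-byte payload */
	uint8_t blob[16] = {
		0x2a, 0x00, 0x00, 0x00,     /* ucode_version = 42 */
		0x08, 0x00, 0x00, 0x00,     /* payload starts at byte 8 */
	};
	memset(blob + 8, 0xab, 8);

	uint32_t version = le32_read(blob + offsetof(struct mc_fw_header, ucode_version));
	uint32_t offset  = le32_read(blob + offsetof(struct mc_fw_header, ucode_array_offset_bytes));

	printf("fw_version=%u, payload at +%u, first payload byte 0x%02x\n",
	       version, offset, blob[offset]);
	return 0;
}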
|
| gmc_v8_0.c |
|   276  err = request_firmware(&adev->gmc.fw, fw_name, adev->dev);   in gmc_v8_0_init_microcode()
|   279  err = amdgpu_ucode_validate(adev->gmc.fw);   in gmc_v8_0_init_microcode()
|   284  release_firmware(adev->gmc.fw);   in gmc_v8_0_init_microcode()
|   285  adev->gmc.fw = NULL;   in gmc_v8_0_init_microcode()
|   314  if (!adev->gmc.fw)   in gmc_v8_0_tonga_mc_load_microcode()
|   317  hdr = (const struct mc_firmware_header_v1_0 *)adev->gmc.fw->data;   in gmc_v8_0_tonga_mc_load_microcode()
|   320  adev->gmc.fw_version = le32_to_cpu(hdr->header.ucode_version);   in gmc_v8_0_tonga_mc_load_microcode()
|   323  (adev->gmc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));   in gmc_v8_0_tonga_mc_load_microcode()
|   326  (adev->gmc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));   in gmc_v8_0_tonga_mc_load_microcode()
|   383  if (!adev->gmc.fw)   in gmc_v8_0_polaris_mc_load_microcode()
|   [all …]
|
| gmc_v9_0.c |
|   645  adev->gmc.vm_fault.num_types = 1;   in gmc_v9_0_set_irq_funcs()
|   646  adev->gmc.vm_fault.funcs = &gmc_v9_0_irq_funcs;   in gmc_v9_0_set_irq_funcs()
|   649  adev->gmc.ecc_irq.num_types = 1;   in gmc_v9_0_set_irq_funcs()
|   650  adev->gmc.ecc_irq.funcs = &gmc_v9_0_ecc_funcs;   in gmc_v9_0_set_irq_funcs()
|   729  if (adev->gmc.xgmi.num_physical_nodes &&   in gmc_v9_0_flush_gpu_tlb()
|   759  spin_lock(&adev->gmc.invalidate_lock);   in gmc_v9_0_flush_gpu_tlb()
|   817  spin_unlock(&adev->gmc.invalidate_lock);   in gmc_v9_0_flush_gpu_tlb()
|   856  bool vega20_xgmi_wa = (adev->gmc.xgmi.num_physical_nodes &&   in gmc_v9_0_flush_gpu_tlb_pasid()
|   1038 adev->gmc.vram_start;   in gmc_v9_0_get_vm_pde()
|   1041 if (!adev->gmc.translate_further)   in gmc_v9_0_get_vm_pde()
|   [all …]
|
| amdgpu_gmc.c |
|   131  if (ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size)   in amdgpu_gmc_agp_addr()
|   134  return adev->gmc.agp_start + ttm->dma_address[0];   in amdgpu_gmc_agp_addr()
|   182  u64 max_mc_address = min(adev->gmc.mc_mask, AMDGPU_GMC_HOLE_START - 1);   in amdgpu_gmc_gart_location()
|   272  struct amdgpu_gmc *gmc = &adev->gmc;   in amdgpu_gmc_filter_faults() local
|   281  if (gmc->fault_ring[gmc->last_fault].timestamp >= stamp)   in amdgpu_gmc_filter_faults()
|   286  fault = &gmc->fault_ring[gmc->fault_hash[hash].idx];   in amdgpu_gmc_filter_faults()
|   294  fault = &gmc->fault_ring[fault->next];   in amdgpu_gmc_filter_faults()
|   302  fault = &gmc->fault_ring[gmc->last_fault];   in amdgpu_gmc_filter_faults()
|   307  fault->next = gmc->fault_hash[hash].idx;   in amdgpu_gmc_filter_faults()
|   308  gmc->fault_hash[hash].idx = gmc->last_fault++;   in amdgpu_gmc_filter_faults()
|   [all …]
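The amdgpu_gmc_filter_faults() hits suggest how repeated GPU page faults are de-duplicated: a small ring of recent faults (gmc->fault_ring), a hash table whose buckets hold the index of the newest matching ring slot (gmc->fault_hash[hash].idx), per-entry next indices that chain older entries, and a timestamp check to stop early. The standalone sketch below reproduces that shape; the sizes, the hash function, and the use of a plain key instead of timestamps are simplifying assumptions, not the amdgpu implementation.

/*
 * Standalone sketch of a duplicate-fault filter shaped like the hits above:
 * a ring of the most recent faults, a hash table whose buckets hold the
 * index of the newest matching ring slot, and per-entry "next" indices that
 * chain older entries.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define FAULT_RING_SIZE 8
#define FAULT_HASH_SIZE 16

struct fault_entry {
	uint64_t key;           /* e.g. the faulting page address; 0 = empty here */
	uint32_t next;          /* ring index of the previous entry in this bucket */
};

struct fault_filter {
	struct fault_entry ring[FAULT_RING_SIZE];
	uint32_t hash[FAULT_HASH_SIZE]; /* bucket -> newest ring index */
	uint32_t last;                  /* next ring slot to overwrite */
};

static uint32_t hash_key(uint64_t key)
{
	return (uint32_t)((key * 0x9e3779b97f4a7c15ull) >> 60) % FAULT_HASH_SIZE;
}

/* Returns true if @key was seen among the last few recorded faults. */
static bool fault_seen(struct fault_filter *f, uint64_t key)
{
	uint32_t h = hash_key(key);
	uint32_t idx = f->hash[h];

	/* walk the bucket's chain; the ring size bounds the walk */
	for (unsigned int i = 0; i < FAULT_RING_SIZE; i++) {
		struct fault_entry *e = &f->ring[idx % FAULT_RING_SIZE];

		if (e->key == key)
			return true;            /* duplicate: filter it out */
		idx = e->next;
	}

	/* not found: record it, overwriting the oldest ring slot */
	idx = f->last++ % FAULT_RING_SIZE;
	f->ring[idx].key = key;
	f->ring[idx].next = f->hash[h]; /* chain to the bucket's previous head */
	f->hash[h] = idx;
	return false;
}

int main(void)
{
	struct fault_filter f = { 0 };
	uint64_t faults[] = { 0x1000, 0x2000, 0x1000, 0x3000 };

	for (unsigned int i = 0; i < 4; i++)
		printf("fault 0x%llx -> %s\n", (unsigned long long)faults[i],
		       fault_seen(&f, faults[i]) ? "filtered (duplicate)" : "new");
	return 0;
}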
|
| gmc_v6_0.c |
|   134  err = request_firmware(&adev->gmc.fw, fw_name, adev->dev);   in gmc_v6_0_init_microcode()
|   138  err = amdgpu_ucode_validate(adev->gmc.fw);   in gmc_v6_0_init_microcode()
|   145  release_firmware(adev->gmc.fw);   in gmc_v6_0_init_microcode()
|   146  adev->gmc.fw = NULL;   in gmc_v6_0_init_microcode()
|   159  if (!adev->gmc.fw)   in gmc_v6_0_mc_load_microcode()
|   162  hdr = (const struct mc_firmware_header_v1_0 *)adev->gmc.fw->data;   in gmc_v6_0_mc_load_microcode()
|   166  adev->gmc.fw_version = le32_to_cpu(hdr->header.ucode_version);   in gmc_v6_0_mc_load_microcode()
|   169  (adev->gmc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes));   in gmc_v6_0_mc_load_microcode()
|   172  (adev->gmc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes));   in gmc_v6_0_mc_load_microcode()
|   257  adev->gmc.vram_start >> 12);   in gmc_v6_0_mc_program()
|   [all …]
|
| amdgpu_gmc.h |
|   245  #define amdgpu_gmc_flush_gpu_tlb(adev, vmid, vmhub, type) ((adev)->gmc.gmc_funcs->flush_gpu_tlb((ad…
|   247  ((adev)->gmc.gmc_funcs->flush_gpu_tlb_pasid \
|   249  #define amdgpu_gmc_emit_flush_gpu_tlb(r, vmid, addr) (r)->adev->gmc.gmc_funcs->emit_flush_gpu_tlb((…
|   250  #define amdgpu_gmc_emit_pasid_mapping(r, vmid, pasid) (r)->adev->gmc.gmc_funcs->emit_pasid_mapping(…
|   251  #define amdgpu_gmc_map_mtype(adev, flags) (adev)->gmc.gmc_funcs->map_mtype((adev),(flags))
|   252  #define amdgpu_gmc_get_vm_pde(adev, level, dst, flags) (adev)->gmc.gmc_funcs->get_vm_pde((adev), (l…
|   253  #define amdgpu_gmc_get_vm_pte(adev, mapping, flags) (adev)->gmc.gmc_funcs->get_vm_pte((adev), (mapp…
|   254  #define amdgpu_gmc_get_vbios_fb_size(adev) (adev)->gmc.gmc_funcs->get_vbios_fb_size((adev))
|   264  static inline bool amdgpu_gmc_vram_full_visible(struct amdgpu_gmc *gmc)   in amdgpu_gmc_vram_full_visible() argument
|   266  WARN_ON(gmc->real_vram_size < gmc->visible_vram_size);   in amdgpu_gmc_vram_full_visible()
|   [all …]
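These macros are thin wrappers around a per-ASIC function table: each gmc_vN_0.c file installs its own set of callbacks in adev->gmc.gmc_funcs (see the *_set_gmc_funcs() hits above), and common code dispatches through it. Below is a compact, self-contained sketch of that ops-table pattern, with an inline check in the spirit of amdgpu_gmc_vram_full_visible(); the names, fields, and the exact check are illustrative, not the real struct amdgpu_gmc_funcs.

/*
 * Standalone sketch of the ops-table indirection behind those macros: the
 * ASIC-specific code fills a table of callbacks, generic code dispatches
 * through thin wrappers.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct gmc;

struct gmc_funcs {
	void (*flush_gpu_tlb)(struct gmc *gmc, unsigned int vmid);
};

struct gmc {
	const struct gmc_funcs *gmc_funcs;
	uint64_t real_vram_size;
	uint64_t visible_vram_size;
};

/* wrapper in the style of the amdgpu_gmc_* macros above */
#define gmc_flush_gpu_tlb(g, vmid) ((g)->gmc_funcs->flush_gpu_tlb((g), (vmid)))

/* true when all of VRAM fits inside the CPU-visible aperture
 * (in the spirit of amdgpu_gmc_vram_full_visible() above) */
static inline bool gmc_vram_full_visible(const struct gmc *gmc)
{
	return gmc->real_vram_size <= gmc->visible_vram_size;
}

/* one concrete "ASIC" implementation */
static void toy_flush_gpu_tlb(struct gmc *gmc, unsigned int vmid)
{
	(void)gmc;
	printf("flushing GPU TLB for vmid %u\n", vmid);
}

static const struct gmc_funcs toy_gmc_funcs = {
	.flush_gpu_tlb = toy_flush_gpu_tlb,
};

int main(void)
{
	struct gmc gmc = {
		.gmc_funcs = &toy_gmc_funcs,        /* like gmc_vN_0_set_gmc_funcs() */
		.real_vram_size = 8ull << 30,       /* 8 GiB on board */
		.visible_vram_size = 256ull << 20,  /* 256 MiB BAR */
	};

	gmc_flush_gpu_tlb(&gmc, 3);
	printf("VRAM fully CPU-visible: %s\n",
	       gmc_vram_full_visible(&gmc) ? "yes" : "no");
	return 0;
}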
|
| gfxhub_v1_0.c |
|   59   (u32)(adev->gmc.gart_start >> 12));   in gfxhub_v1_0_init_gart_aperture_regs()
|   61   (u32)(adev->gmc.gart_start >> 44));   in gfxhub_v1_0_init_gart_aperture_regs()
|   64   (u32)(adev->gmc.gart_end >> 12));   in gfxhub_v1_0_init_gart_aperture_regs()
|   66   (u32)(adev->gmc.gart_end >> 44));   in gfxhub_v1_0_init_gart_aperture_regs()
|   75   WREG32_SOC15_RLC(GC, 0, mmMC_VM_AGP_BOT, adev->gmc.agp_start >> 24);   in gfxhub_v1_0_init_system_aperture_regs()
|   76   WREG32_SOC15_RLC(GC, 0, mmMC_VM_AGP_TOP, adev->gmc.agp_end >> 24);   in gfxhub_v1_0_init_system_aperture_regs()
|   81   min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);   in gfxhub_v1_0_init_system_aperture_regs()
|   93   max((adev->gmc.fb_end >> 18) + 0x1,   in gfxhub_v1_0_init_system_aperture_regs()
|   94   adev->gmc.agp_end >> 18));   in gfxhub_v1_0_init_system_aperture_regs()
|   98   max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);   in gfxhub_v1_0_init_system_aperture_regs()
|   [all …]
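The gfxhub/mmhub GART-aperture hits here and below all use the same register idiom: a 48-bit GPU address is programmed as a page number split across a 32-bit LO/HI register pair, with (addr >> 12) going into the low register and (addr >> 44) into the high one. A small standalone sketch of that split and its reassembly follows; the register names and the toy wreg32() helper are placeholders, not the real WREG32_SOC15() interface.

/*
 * Standalone sketch of the LO32/HI32 register-pair idiom above: the GART
 * aperture bounds are programmed as page numbers, the low 32 bits of
 * (addr >> 12) in one register and the remaining bits (addr >> 44) in its
 * HI32 partner.
 */
#include <stdint.h>
#include <stdio.h>

static void wreg32(const char *name, uint32_t val)
{
	printf("%-44s <- 0x%08x\n", name, val);
}

int main(void)
{
	uint64_t gart_start = 0x0000800000000000ull;    /* example 48-bit address */
	uint64_t gart_end   = 0x00008000ffffffffull;

	wreg32("VM_CONTEXT0_PAGE_TABLE_START_ADDR_LO32", (uint32_t)(gart_start >> 12));
	wreg32("VM_CONTEXT0_PAGE_TABLE_START_ADDR_HI32", (uint32_t)(gart_start >> 44));
	wreg32("VM_CONTEXT0_PAGE_TABLE_END_ADDR_LO32",   (uint32_t)(gart_end >> 12));
	wreg32("VM_CONTEXT0_PAGE_TABLE_END_ADDR_HI32",   (uint32_t)(gart_end >> 44));

	/* reassembling both writes recovers the full page number */
	uint64_t pfn = ((uint64_t)(uint32_t)(gart_start >> 44) << 32) |
		       (uint32_t)(gart_start >> 12);
	printf("start page number 0x%llx -> address 0x%llx\n",
	       (unsigned long long)pfn, (unsigned long long)(pfn << 12));
	return 0;
}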
|
| gfxhub_v1_1.c |
|   55   adev->gmc.xgmi.num_physical_nodes = max_region + 1;   in gfxhub_v1_1_get_xgmi_info()
|   56   if (adev->gmc.xgmi.num_physical_nodes > max_num_physical_nodes)   in gfxhub_v1_1_get_xgmi_info()
|   59   adev->gmc.xgmi.physical_node_id =   in gfxhub_v1_1_get_xgmi_info()
|   61   if (adev->gmc.xgmi.physical_node_id > max_physical_node_id)   in gfxhub_v1_1_get_xgmi_info()
|   63   adev->gmc.xgmi.node_segment_size = REG_GET_FIELD(   in gfxhub_v1_1_get_xgmi_info()
|
| gfxhub_v2_1.c |
|   141  (u32)(adev->gmc.gart_start >> 12));   in gfxhub_v2_1_init_gart_aperture_regs()
|   143  (u32)(adev->gmc.gart_start >> 44));   in gfxhub_v2_1_init_gart_aperture_regs()
|   146  (u32)(adev->gmc.gart_end >> 12));   in gfxhub_v2_1_init_gart_aperture_regs()
|   148  (u32)(adev->gmc.gart_end >> 44));   in gfxhub_v2_1_init_gart_aperture_regs()
|   162  adev->gmc.vram_start >> 18);   in gfxhub_v2_1_init_system_aperture_regs()
|   164  adev->gmc.vram_end >> 18);   in gfxhub_v2_1_init_system_aperture_regs()
|   167  value = adev->vram_scratch.gpu_addr - adev->gmc.vram_start   in gfxhub_v2_1_init_system_aperture_regs()
|   235  if (adev->gmc.translate_further) {   in gfxhub_v2_1_init_cache_regs()
|   322  !adev->gmc.noretry);   in gfxhub_v2_1_setup_vmid_config()
|   360  adev->gmc.vram_start >> 24);   in gfxhub_v2_1_gart_enable()
|   [all …]
|
| mmhub_v1_0.c |
|   48   adev->gmc.fb_start = base;   in mmhub_v1_0_get_fb_location()
|   49   adev->gmc.fb_end = top;   in mmhub_v1_0_get_fb_location()
|   75   (u32)(adev->gmc.gart_start >> 12));   in mmhub_v1_0_init_gart_aperture_regs()
|   77   (u32)(adev->gmc.gart_start >> 44));   in mmhub_v1_0_init_gart_aperture_regs()
|   80   (u32)(adev->gmc.gart_end >> 12));   in mmhub_v1_0_init_gart_aperture_regs()
|   82   (u32)(adev->gmc.gart_end >> 44));   in mmhub_v1_0_init_gart_aperture_regs()
|   92   WREG32_SOC15(MMHUB, 0, mmMC_VM_AGP_BOT, adev->gmc.agp_start >> 24);   in mmhub_v1_0_init_system_aperture_regs()
|   93   WREG32_SOC15(MMHUB, 0, mmMC_VM_AGP_TOP, adev->gmc.agp_end >> 24);   in mmhub_v1_0_init_system_aperture_regs()
|   97   min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);   in mmhub_v1_0_init_system_aperture_regs()
|   107  max((adev->gmc.fb_end >> 18) + 0x1,   in mmhub_v1_0_init_system_aperture_regs()
|   [all …]
|
| amdgpu_amdkfd.c |
|   76   amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size;   in amdgpu_amdkfd_device_probe()
|   401  resource_size_t aper_limit = adev->gmc.aper_base + adev->gmc.aper_size;   in amdgpu_amdkfd_get_local_mem_info()
|   404  if (!(adev->gmc.aper_base & address_mask || aper_limit & address_mask)) {   in amdgpu_amdkfd_get_local_mem_info()
|   405  mem_info->local_mem_size_public = adev->gmc.visible_vram_size;   in amdgpu_amdkfd_get_local_mem_info()
|   406  mem_info->local_mem_size_private = adev->gmc.real_vram_size -   in amdgpu_amdkfd_get_local_mem_info()
|   407  adev->gmc.visible_vram_size;   in amdgpu_amdkfd_get_local_mem_info()
|   410  mem_info->local_mem_size_private = adev->gmc.real_vram_size;   in amdgpu_amdkfd_get_local_mem_info()
|   412  mem_info->vram_width = adev->gmc.vram_width;   in amdgpu_amdkfd_get_local_mem_info()
|   415  &adev->gmc.aper_base, &aper_limit,   in amdgpu_amdkfd_get_local_mem_info()
|   544  return adev->gmc.xgmi.hive_id;   in amdgpu_amdkfd_get_hive_id()
|   [all …]
|
| amdgpu_vram_mgr.c |
|   55   return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.real_vram_size);   in amdgpu_mem_info_vram_total_show()
|   72   return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.visible_vram_size);   in amdgpu_mem_info_vis_vram_total_show()
|   120  switch (adev->gmc.vram_vendor) {   in amdgpu_mem_info_vram_vendor()
|   181  ttm_resource_manager_init(man, adev->gmc.real_vram_size >> PAGE_SHIFT);   in amdgpu_vram_mgr_init()
|   242  if (start >= adev->gmc.visible_vram_size)   in amdgpu_vram_mgr_vis_size()
|   245  return (end > adev->gmc.visible_vram_size ?   in amdgpu_vram_mgr_vis_size()
|   246  adev->gmc.visible_vram_size : end) - start;   in amdgpu_vram_mgr_vis_size()
|   265  if (amdgpu_gmc_vram_full_visible(&adev->gmc))   in amdgpu_vram_mgr_bo_visible_size()
|   268  if (mem->start >= adev->gmc.visible_vram_size >> PAGE_SHIFT)   in amdgpu_vram_mgr_bo_visible_size()
|   328  max_bytes = adev->gmc.mc_vram_size;   in amdgpu_vram_mgr_new()
|   [all …]
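The amdgpu_vram_mgr_vis_size() hits (file lines 242-246) clamp a VRAM block against the CPU-visible aperture: anything starting beyond visible_vram_size counts for nothing, otherwise the block's end is capped at the aperture limit. A tiny standalone version of that clamp, with made-up numbers, is sketched below.

/*
 * Standalone sketch of the clamp in amdgpu_vram_mgr_vis_size() above: count
 * only the bytes of a VRAM block that fall below the CPU-visible limit.
 */
#include <stdint.h>
#include <stdio.h>

static uint64_t vis_size(uint64_t visible_vram_size, uint64_t start, uint64_t size)
{
	uint64_t end = start + size;

	if (start >= visible_vram_size)
		return 0;                       /* block lies entirely above the aperture */

	return (end > visible_vram_size ? visible_vram_size : end) - start;
}

int main(void)
{
	uint64_t visible = 256ull << 20;        /* 256 MiB CPU-visible VRAM */

	/* 100 MiB block starting at 200 MiB: only 56 MiB of it is visible */
	printf("%llu bytes visible\n",
	       (unsigned long long)vis_size(visible, 200ull << 20, 100ull << 20));
	/* block above the aperture: nothing visible */
	printf("%llu bytes visible\n",
	       (unsigned long long)vis_size(visible, 512ull << 20, 16ull << 20));
	return 0;
}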
|
| amdgpu_test.c |
|   47   n = adev->gmc.gart_size - AMDGPU_IB_POOL_SIZE;   in amdgpu_do_test_moves()
|   159  (gart_addr - adev->gmc.gart_start +   in amdgpu_do_test_moves()
|   162  (vram_addr - adev->gmc.vram_start +   in amdgpu_do_test_moves()
|   205  (vram_addr - adev->gmc.vram_start +   in amdgpu_do_test_moves()
|   208  (gart_addr - adev->gmc.gart_start +   in amdgpu_do_test_moves()
|   218  gart_addr - adev->gmc.gart_start);   in amdgpu_do_test_moves()
|
| amdgpu_object.c |
|   135  unsigned visible_pfn = adev->gmc.visible_vram_size >> PAGE_SHIFT;   in amdgpu_bo_placement_from_domain()
|   594  if (!amdgpu_gmc_vram_full_visible(&adev->gmc) &&   in amdgpu_bo_do_create()
|   596  bo->tbo.mem.start < adev->gmc.visible_vram_size >> PAGE_SHIFT)   in amdgpu_bo_do_create()
|   1090 arch_io_reserve_memtype_wc(adev->gmc.aper_base,   in amdgpu_bo_init()
|   1091 adev->gmc.aper_size);   in amdgpu_bo_init()
|   1094 adev->gmc.vram_mtrr = arch_phys_wc_add(adev->gmc.aper_base,   in amdgpu_bo_init()
|   1095 adev->gmc.aper_size);   in amdgpu_bo_init()
|   1097 adev->gmc.mc_vram_size >> 20,   in amdgpu_bo_init()
|   1098 (unsigned long long)adev->gmc.aper_size >> 20);   in amdgpu_bo_init()
|   1100 adev->gmc.vram_width, amdgpu_vram_names[adev->gmc.vram_type]);   in amdgpu_bo_init()
|   [all …]
|
| gfxhub_v2_0.c |
|   141  (u32)(adev->gmc.gart_start >> 12));   in gfxhub_v2_0_init_gart_aperture_regs()
|   143  (u32)(adev->gmc.gart_start >> 44));   in gfxhub_v2_0_init_gart_aperture_regs()
|   146  (u32)(adev->gmc.gart_end >> 12));   in gfxhub_v2_0_init_gart_aperture_regs()
|   148  (u32)(adev->gmc.gart_end >> 44));   in gfxhub_v2_0_init_gart_aperture_regs()
|   163  adev->gmc.vram_start >> 18);   in gfxhub_v2_0_init_system_aperture_regs()
|   165  adev->gmc.vram_end >> 18);   in gfxhub_v2_0_init_system_aperture_regs()
|   168  value = adev->vram_scratch.gpu_addr - adev->gmc.vram_start   in gfxhub_v2_0_init_system_aperture_regs()
|   235  if (adev->gmc.translate_further) {   in gfxhub_v2_0_init_cache_regs()
|   316  !adev->gmc.noretry);   in gfxhub_v2_0_setup_vmid_config()
|
| amdgpu_xgmi.h |
|   73   adev->gmc.xgmi.hive_id &&   in amdgpu_xgmi_same_hive()
|   74   adev->gmc.xgmi.hive_id == bo_adev->gmc.xgmi.hive_id);   in amdgpu_xgmi_same_hive()
|
| amdgpu_ttm.c |
|   127  } else if (!amdgpu_gmc_vram_full_visible(&adev->gmc) &&   in amdgpu_evict_flags()
|   138  abo->placements[0].fpfn = adev->gmc.visible_vram_size >> PAGE_SHIFT;   in amdgpu_evict_flags()
|   264  *addr = adev->gmc.gart_start;   in amdgpu_ttm_map_buffer()
|   646  <= adev->gmc.visible_vram_size;   in amdgpu_mem_visible()
|   757  if ((mem->bus.offset + bus_size) > adev->gmc.visible_vram_size)   in amdgpu_ttm_io_mem_reserve()
|   768  mem->bus.offset += adev->gmc.aper_base;   in amdgpu_ttm_io_mem_reserve()
|   785  offset += adev->gmc.aper_base;   in amdgpu_ttm_io_mem_pfn()
|   802  return adev->gmc.gart_start;   in amdgpu_ttm_domain_start()
|   804  return adev->gmc.vram_start;   in amdgpu_ttm_domain_start()
|   1197 placements.lpfn = adev->gmc.gart_size >> PAGE_SHIFT;   in amdgpu_ttm_alloc_gart()
|   [all …]
|
| mmhub_v2_0.c |
|   183  (u32)(adev->gmc.gart_start >> 12));   in mmhub_v2_0_init_gart_aperture_regs()
|   185  (u32)(adev->gmc.gart_start >> 44));   in mmhub_v2_0_init_gart_aperture_regs()
|   188  (u32)(adev->gmc.gart_end >> 12));   in mmhub_v2_0_init_gart_aperture_regs()
|   190  (u32)(adev->gmc.gart_end >> 44));   in mmhub_v2_0_init_gart_aperture_regs()
|   206  adev->gmc.vram_start >> 18);   in mmhub_v2_0_init_system_aperture_regs()
|   208  adev->gmc.vram_end >> 18);   in mmhub_v2_0_init_system_aperture_regs()
|   212  value = adev->vram_scratch.gpu_addr - adev->gmc.vram_start +   in mmhub_v2_0_init_system_aperture_regs()
|   281  if (adev->gmc.translate_further) {   in mmhub_v2_0_init_cache_regs()
|   372  !adev->gmc.noretry);   in mmhub_v2_0_setup_vmid_config()
|
| amdgpu_fb.c |
|   248  tmp = amdgpu_bo_gpu_offset(abo) - adev->gmc.vram_start;   in amdgpufb_create()
|   249  info->fix.smem_start = adev->gmc.aper_base + tmp;   in amdgpufb_create()
|   258  info->apertures->ranges[0].size = adev->gmc.aper_size;   in amdgpufb_create()
|   268  DRM_INFO("vram apper at 0x%lX\n", (unsigned long)adev->gmc.aper_base);   in amdgpufb_create()
|   328  if (adev->gmc.real_vram_size <= (32*1024*1024))   in amdgpu_fbdev_init()
|
| mmhub_v9_4.c |
|   51   adev->gmc.fb_start = base;   in mmhub_v9_4_get_fb_location()
|   52   adev->gmc.fb_end = top;   in mmhub_v9_4_get_fb_location()
|   84   (u32)(adev->gmc.gart_start >> 12));   in mmhub_v9_4_init_gart_aperture_regs()
|   88   (u32)(adev->gmc.gart_start >> 44));   in mmhub_v9_4_init_gart_aperture_regs()
|   93   (u32)(adev->gmc.gart_end >> 12));   in mmhub_v9_4_init_gart_aperture_regs()
|   97   (u32)(adev->gmc.gart_end >> 44));   in mmhub_v9_4_init_gart_aperture_regs()
|   122  adev->gmc.agp_end >> 24);   in mmhub_v9_4_init_system_aperture_regs()
|   125  adev->gmc.agp_start >> 24);   in mmhub_v9_4_init_system_aperture_regs()
|   132  min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18);   in mmhub_v9_4_init_system_aperture_regs()
|   136  max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18);   in mmhub_v9_4_init_system_aperture_regs()
|   [all …]
|
| amdgpu_device.c |
|   264  last = min(pos + size, adev->gmc.visible_vram_size);   in amdgpu_device_vram_access()
|   1086 u64 space_needed = roundup_pow_of_two(adev->gmc.real_vram_size);   in amdgpu_device_resize_fb_bar()
|   1099 if (adev->gmc.real_vram_size &&   in amdgpu_device_resize_fb_bar()
|   1100 (pci_resource_len(adev->pdev, 0) >= adev->gmc.real_vram_size))   in amdgpu_device_resize_fb_bar()
|   2263 if (adev->gmc.xgmi.num_physical_nodes > 1)   in amdgpu_device_ip_init()
|   2475 if (adev->gmc.xgmi.num_physical_nodes > 1) {   in amdgpu_device_ip_late_init()
|   2491 if (mgpu_info.num_dgpu == adev->gmc.xgmi.num_physical_nodes) {   in amdgpu_device_ip_late_init()
|   2532 if (adev->gmc.xgmi.num_physical_nodes > 1)   in amdgpu_device_ip_fini()
|   3196 adev->gmc.gart_size = 512 * 1024 * 1024;   in amdgpu_device_init()
|   3203 adev->gmc.gmc_funcs = NULL;   in amdgpu_device_init()
|   [all …]
|
| amdgpu_kms.c |
|   180  if (!adev->gmc.noretry)   in amdgpu_driver_load_kms()
|   239  fw_info->ver = adev->gmc.fw_version;   in amdgpu_firmware_info()
|   622  vram_gtt.vram_size = adev->gmc.real_vram_size -   in amdgpu_info_ioctl()
|   626  min(adev->gmc.visible_vram_size -   in amdgpu_info_ioctl()
|   642  mem.vram.total_heap_size = adev->gmc.real_vram_size;   in amdgpu_info_ioctl()
|   643  mem.vram.usable_heap_size = adev->gmc.real_vram_size -   in amdgpu_info_ioctl()
|   651  adev->gmc.visible_vram_size;   in amdgpu_info_ioctl()
|   653  min(adev->gmc.visible_vram_size -   in amdgpu_info_ioctl()
|   779  dev_info.vram_type = adev->gmc.vram_type;   in amdgpu_info_ioctl()
|   780  dev_info.vram_bit_width = adev->gmc.vram_width;   in amdgpu_info_ioctl()
|
| /OK3568_Linux_fs/kernel/drivers/video/fbdev/ |
| w100fb.c |
|   296  union dp_gui_master_cntl_u gmc;   in w100_init_graphic_engine() local
|   322  gmc.val = 0;   in w100_init_graphic_engine()
|   323  gmc.f.gmc_src_pitch_offset_cntl = 1;   in w100_init_graphic_engine()
|   324  gmc.f.gmc_dst_pitch_offset_cntl = 1;   in w100_init_graphic_engine()
|   325  gmc.f.gmc_src_clipping = 1;   in w100_init_graphic_engine()
|   326  gmc.f.gmc_dst_clipping = 1;   in w100_init_graphic_engine()
|   327  gmc.f.gmc_brush_datatype = GMC_BRUSH_NONE;   in w100_init_graphic_engine()
|   328  gmc.f.gmc_dst_datatype = 3; /* from DstType_16Bpp_444 */   in w100_init_graphic_engine()
|   329  gmc.f.gmc_src_datatype = SRC_DATATYPE_EQU_DST;   in w100_init_graphic_engine()
|   330  gmc.f.gmc_byte_pix_order = 1;   in w100_init_graphic_engine()
|   [all …]
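The w100_init_graphic_engine() hits build the DP_GUI_MASTER_CNTL register through a union that overlays named bit-fields (gmc.f.*) on the raw 32-bit word (gmc.val). The standalone sketch below shows that union-overlay idiom; the field names and widths are invented for illustration and do not match the real dp_gui_master_cntl_u, and bit-field ordering is compiler-dependent, which real driver headers handle explicitly.

/*
 * Standalone sketch of the union-overlay idiom used above: named bit-fields
 * share storage with the raw 32-bit register word, so individual controls
 * are set by name and the whole word is written once.
 */
#include <stdint.h>
#include <stdio.h>

union dp_gui_cntl {                     /* hypothetical, much smaller than the real union */
	struct {
		uint32_t src_pitch_offset_cntl : 1;
		uint32_t dst_pitch_offset_cntl : 1;
		uint32_t src_clipping          : 1;
		uint32_t dst_clipping          : 1;
		uint32_t brush_datatype        : 4;
		uint32_t dst_datatype          : 4;
		uint32_t reserved              : 20;
	} f;
	uint32_t val;
};

int main(void)
{
	union dp_gui_cntl gmc = { .val = 0 };

	gmc.f.src_pitch_offset_cntl = 1;
	gmc.f.dst_pitch_offset_cntl = 1;
	gmc.f.src_clipping = 1;
	gmc.f.dst_clipping = 1;
	gmc.f.dst_datatype = 3;         /* e.g. a 16 bpp destination format */

	/* in the driver, this value would now be written to the GMC register */
	printf("register value: 0x%08x\n", gmc.val);
	return 0;
}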
|