Lines matching refs:adev: every reference to adev in drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c, listed with its source line number and the containing function. The trailing "argument" or "local" marker records whether adev is a parameter of that function or a local variable derived from the opaque kgd handle.
66 void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev) in amdgpu_amdkfd_device_probe() argument
68 bool vf = amdgpu_sriov_vf(adev); in amdgpu_amdkfd_device_probe()
73 adev->kfd.dev = kgd2kfd_probe((struct kgd_dev *)adev, in amdgpu_amdkfd_device_probe()
74 adev->pdev, adev->asic_type, vf); in amdgpu_amdkfd_device_probe()
76 if (adev->kfd.dev) in amdgpu_amdkfd_device_probe()
77 amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size; in amdgpu_amdkfd_device_probe()
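The matches at 66-77 cover essentially the whole probe helper: amdgpu hands itself to KFD as an opaque kgd_dev handle and, if KFD accepts the device, accounts its VRAM in the global total. A minimal reconstruction from the lines above (brace placement and surrounding declarations are assumed, not copied from the source):

void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev)
{
	bool vf = amdgpu_sriov_vf(adev);

	/* The amdgpu_device doubles as the opaque kgd_dev handle that
	 * KFD passes back on every callback into this file. */
	adev->kfd.dev = kgd2kfd_probe((struct kgd_dev *)adev,
				      adev->pdev, adev->asic_type, vf);

	if (adev->kfd.dev)
		amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size;
}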
93 static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev, in amdgpu_doorbell_get_kfd_info() argument
102 if (adev->doorbell.size > adev->doorbell.num_doorbells * sizeof(u32)) { in amdgpu_doorbell_get_kfd_info()
103 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
104 *aperture_size = adev->doorbell.size; in amdgpu_doorbell_get_kfd_info()
105 *start_offset = adev->doorbell.num_doorbells * sizeof(u32); in amdgpu_doorbell_get_kfd_info()
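The matches at 93-105 fall inside the static helper that splits the doorbell BAR: amdgpu keeps the first num_doorbells 32-bit slots and amdkfd gets whatever is left of the aperture. A hedged sketch of the whole helper; the pointer parameter types and the else branch (zeroing the outputs when nothing is left over) are assumed from how the values are used:

static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev,
					 phys_addr_t *aperture_base,
					 size_t *aperture_size,
					 size_t *start_offset)
{
	if (adev->doorbell.size > adev->doorbell.num_doorbells * sizeof(u32)) {
		/* KFD's slice starts right after amdgpu's own doorbells */
		*aperture_base = adev->doorbell.base;
		*aperture_size = adev->doorbell.size;
		*start_offset = adev->doorbell.num_doorbells * sizeof(u32);
	} else {
		*aperture_base = 0;
		*aperture_size = 0;
		*start_offset = 0;
	}
}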
113 void amdgpu_amdkfd_device_init(struct amdgpu_device *adev) in amdgpu_amdkfd_device_init() argument
118 if (adev->kfd.dev) { in amdgpu_amdkfd_device_init()
122 ((1 << adev->vm_manager.first_kfd_vmid) - 1), in amdgpu_amdkfd_device_init()
123 .num_pipe_per_mec = adev->gfx.mec.num_pipe_per_mec, in amdgpu_amdkfd_device_init()
124 .num_queue_per_pipe = adev->gfx.mec.num_queue_per_pipe, in amdgpu_amdkfd_device_init()
125 .gpuvm_size = min(adev->vm_manager.max_pfn in amdgpu_amdkfd_device_init()
128 .drm_render_minor = adev_to_drm(adev)->render->index, in amdgpu_amdkfd_device_init()
129 .sdma_doorbell_idx = adev->doorbell_index.sdma_engine, in amdgpu_amdkfd_device_init()
137 adev->gfx.mec.queue_bitmap, in amdgpu_amdkfd_device_init()
144 * adev->gfx.mec.num_pipe_per_mec in amdgpu_amdkfd_device_init()
145 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_amdkfd_device_init()
149 amdgpu_doorbell_get_kfd_info(adev, in amdgpu_amdkfd_device_init()
162 if (adev->asic_type >= CHIP_VEGA10) { in amdgpu_amdkfd_device_init()
164 adev->doorbell_index.first_non_cp; in amdgpu_amdkfd_device_init()
166 adev->doorbell_index.last_non_cp; in amdgpu_amdkfd_device_init()
169 adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev, in amdgpu_amdkfd_device_init()
170 adev_to_drm(adev), &gpu_resources); in amdgpu_amdkfd_device_init()
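The matches at 113-170 all belong to amdgpu_amdkfd_device_init(), which packs everything amdgpu is willing to share (compute VMIDs, MEC pipe/queue topology, GPUVM range, doorbell aperture) into a kgd2kfd_shared_resources struct and hands it to KFD. A condensed, hedged sketch; the struct field names and the AMDGPU_NUM_VMID, AMDGPU_GPU_PAGE_SHIFT and AMDGPU_GMC_HOLE_START constants are taken from the amdgpu/KFD interface headers as I understand them, and the MEC queue-bitmap setup around line 137 is elided:

void amdgpu_amdkfd_device_init(struct amdgpu_device *adev)
{
	if (adev->kfd.dev) {
		struct kgd2kfd_shared_resources gpu_resources = {
			/* VMIDs below first_kfd_vmid stay with amdgpu,
			 * the rest are handed to KFD */
			.compute_vmid_bitmap =
				((1 << AMDGPU_NUM_VMID) - 1) -
				((1 << adev->vm_manager.first_kfd_vmid) - 1),
			.num_pipe_per_mec = adev->gfx.mec.num_pipe_per_mec,
			.num_queue_per_pipe = adev->gfx.mec.num_queue_per_pipe,
			.gpuvm_size = min(adev->vm_manager.max_pfn
					  << AMDGPU_GPU_PAGE_SHIFT,
					  AMDGPU_GMC_HOLE_START),
			.drm_render_minor = adev_to_drm(adev)->render->index,
			.sdma_doorbell_idx = adev->doorbell_index.sdma_engine,
		};

		/* (elided) copy adev->gfx.mec.queue_bitmap into the shared
		 * resources so KFD knows which CP queues it may use */

		amdgpu_doorbell_get_kfd_info(adev,
				&gpu_resources.doorbell_physical_address,
				&gpu_resources.doorbell_aperture_size,
				&gpu_resources.doorbell_start_offset);

		/* Vega10 and later reserve a doorbell range that is not
		 * used by CP queues */
		if (adev->asic_type >= CHIP_VEGA10) {
			gpu_resources.non_cp_doorbells_start =
				adev->doorbell_index.first_non_cp;
			gpu_resources.non_cp_doorbells_end =
				adev->doorbell_index.last_non_cp;
		}

		adev->kfd.init_complete = kgd2kfd_device_init(adev->kfd.dev,
				adev_to_drm(adev), &gpu_resources);
	}
}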
174 void amdgpu_amdkfd_device_fini_sw(struct amdgpu_device *adev) in amdgpu_amdkfd_device_fini_sw() argument
176 if (adev->kfd.dev) { in amdgpu_amdkfd_device_fini_sw()
177 kgd2kfd_device_exit(adev->kfd.dev); in amdgpu_amdkfd_device_fini_sw()
178 adev->kfd.dev = NULL; in amdgpu_amdkfd_device_fini_sw()
182 void amdgpu_amdkfd_interrupt(struct amdgpu_device *adev, in amdgpu_amdkfd_interrupt() argument
185 if (adev->kfd.dev) in amdgpu_amdkfd_interrupt()
186 kgd2kfd_interrupt(adev->kfd.dev, ih_ring_entry); in amdgpu_amdkfd_interrupt()
189 void amdgpu_amdkfd_suspend(struct amdgpu_device *adev, bool run_pm) in amdgpu_amdkfd_suspend() argument
191 if (adev->kfd.dev) in amdgpu_amdkfd_suspend()
192 kgd2kfd_suspend(adev->kfd.dev, run_pm); in amdgpu_amdkfd_suspend()
195 int amdgpu_amdkfd_resume_iommu(struct amdgpu_device *adev) in amdgpu_amdkfd_resume_iommu() argument
199 if (adev->kfd.dev) in amdgpu_amdkfd_resume_iommu()
200 r = kgd2kfd_resume_iommu(adev->kfd.dev); in amdgpu_amdkfd_resume_iommu()
205 int amdgpu_amdkfd_resume(struct amdgpu_device *adev, bool run_pm) in amdgpu_amdkfd_resume() argument
209 if (adev->kfd.dev) in amdgpu_amdkfd_resume()
210 r = kgd2kfd_resume(adev->kfd.dev, run_pm); in amdgpu_amdkfd_resume()
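The matches from 174 through 210 are thin pass-throughs: each wrapper checks adev->kfd.dev and forwards to the matching kgd2kfd_* entry point, returning 0 when KFD is not attached. The suspend/resume pair, reconstructed from the matched lines (the r = 0 plumbing is assumed but follows the pattern visible at 205-210):

void amdgpu_amdkfd_suspend(struct amdgpu_device *adev, bool run_pm)
{
	if (adev->kfd.dev)
		kgd2kfd_suspend(adev->kfd.dev, run_pm);
}

int amdgpu_amdkfd_resume(struct amdgpu_device *adev, bool run_pm)
{
	int r = 0;

	if (adev->kfd.dev)
		r = kgd2kfd_resume(adev->kfd.dev, run_pm);

	return r;
}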
215 int amdgpu_amdkfd_pre_reset(struct amdgpu_device *adev) in amdgpu_amdkfd_pre_reset() argument
219 if (adev->kfd.dev) in amdgpu_amdkfd_pre_reset()
220 r = kgd2kfd_pre_reset(adev->kfd.dev); in amdgpu_amdkfd_pre_reset()
225 int amdgpu_amdkfd_post_reset(struct amdgpu_device *adev) in amdgpu_amdkfd_post_reset() argument
229 if (adev->kfd.dev) in amdgpu_amdkfd_post_reset()
230 r = kgd2kfd_post_reset(adev->kfd.dev); in amdgpu_amdkfd_post_reset()
237 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_gpu_reset() local
239 if (amdgpu_device_should_recover_gpu(adev)) in amdgpu_amdkfd_gpu_reset()
240 amdgpu_device_gpu_recover(adev, NULL); in amdgpu_amdkfd_gpu_reset()
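The reset hooks at 215-230 follow the same wrapper pattern, while amdgpu_amdkfd_gpu_reset() at 237-240 goes the other way: KFD, holding only the opaque kgd handle, asks amdgpu to recover the whole device. A hedged reconstruction (return types and the early-return value are assumed):

int amdgpu_amdkfd_pre_reset(struct amdgpu_device *adev)
{
	int r = 0;

	if (adev->kfd.dev)
		r = kgd2kfd_pre_reset(adev->kfd.dev);

	return r;
}

void amdgpu_amdkfd_gpu_reset(struct kgd_dev *kgd)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	/* amdgpu decides whether a full GPU recovery is worthwhile */
	if (amdgpu_device_should_recover_gpu(adev))
		amdgpu_device_gpu_recover(adev, NULL);
}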
247 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_alloc_gtt_mem() local
265 r = amdgpu_bo_create(adev, &bp, &bo); in amdgpu_amdkfd_alloc_gtt_mem()
267 dev_err(adev->dev, in amdgpu_amdkfd_alloc_gtt_mem()
275 dev_err(adev->dev, "(%d) failed to reserve bo for amdkfd\n", r); in amdgpu_amdkfd_alloc_gtt_mem()
281 dev_err(adev->dev, "(%d) failed to pin bo for amdkfd\n", r); in amdgpu_amdkfd_alloc_gtt_mem()
287 dev_err(adev->dev, "%p bind failed\n", bo); in amdgpu_amdkfd_alloc_gtt_mem()
293 dev_err(adev->dev, in amdgpu_amdkfd_alloc_gtt_mem()
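The dev_err() matches at 265-293 mark the stages of amdgpu_amdkfd_alloc_gtt_mem(): create a GTT buffer object, reserve it, pin it, bind it into GART and map it for the CPU, bailing out with a message at each step. A simplified sketch of that ladder; the function signature, the amdgpu_bo_param setup, the exact message wording and the unwind labels are assumptions, not the verbatim source:

int amdgpu_amdkfd_alloc_gtt_mem(struct kgd_dev *kgd, size_t size,
				void **mem_obj, uint64_t *gpu_addr,
				void **cpu_ptr, bool cp_mqd_gfx9)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;
	struct amdgpu_bo_param bp;
	struct amdgpu_bo *bo = NULL;
	int r;

	memset(&bp, 0, sizeof(bp));
	bp.size = size;
	bp.byte_align = PAGE_SIZE;
	bp.domain = AMDGPU_GEM_DOMAIN_GTT;
	bp.type = ttm_bo_type_kernel;

	r = amdgpu_bo_create(adev, &bp, &bo);
	if (r) {
		dev_err(adev->dev,
			"failed to allocate BO for amdkfd (%d)\n", r);
		return r;
	}

	r = amdgpu_bo_reserve(bo, true);
	if (r) {
		dev_err(adev->dev, "(%d) failed to reserve bo for amdkfd\n", r);
		goto err_free;			/* hypothetical label */
	}

	r = amdgpu_bo_pin(bo, AMDGPU_GEM_DOMAIN_GTT);
	if (r) {
		dev_err(adev->dev, "(%d) failed to pin bo for amdkfd\n", r);
		goto err_unreserve;		/* hypothetical label */
	}

	/* GART bind ("%p bind failed") and amdgpu_bo_kmap() for the CPU
	 * mapping follow here, then mem_obj/gpu_addr/cpu_ptr are filled */
	return 0;

err_unreserve:
	amdgpu_bo_unreserve(bo);
err_free:
	amdgpu_bo_unref(&bo);
	return r;
}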
330 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_alloc_gws() local
345 r = amdgpu_bo_create_user(adev, &bp, &ubo); in amdgpu_amdkfd_alloc_gws()
347 dev_err(adev->dev, in amdgpu_amdkfd_alloc_gws()
367 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_fw_version() local
371 return adev->gfx.pfp_fw_version; in amdgpu_amdkfd_get_fw_version()
374 return adev->gfx.me_fw_version; in amdgpu_amdkfd_get_fw_version()
377 return adev->gfx.ce_fw_version; in amdgpu_amdkfd_get_fw_version()
380 return adev->gfx.mec_fw_version; in amdgpu_amdkfd_get_fw_version()
383 return adev->gfx.mec2_fw_version; in amdgpu_amdkfd_get_fw_version()
386 return adev->gfx.rlc_fw_version; in amdgpu_amdkfd_get_fw_version()
389 return adev->sdma.instance[0].fw_version; in amdgpu_amdkfd_get_fw_version()
392 return adev->sdma.instance[1].fw_version; in amdgpu_amdkfd_get_fw_version()
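The fw-version matches at 371-392 are the arms of a single switch over the engine type: each case returns the firmware version amdgpu already cached for that engine at microcode load time. A hedged reconstruction; the return type and the KGD_ENGINE_* enumerator names come from the kgd_kfd_interface header as I recall it:

uint32_t amdgpu_amdkfd_get_fw_version(struct kgd_dev *kgd,
				      enum kgd_engine_type type)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	switch (type) {
	case KGD_ENGINE_PFP:
		return adev->gfx.pfp_fw_version;
	case KGD_ENGINE_ME:
		return adev->gfx.me_fw_version;
	case KGD_ENGINE_CE:
		return adev->gfx.ce_fw_version;
	case KGD_ENGINE_MEC1:
		return adev->gfx.mec_fw_version;
	case KGD_ENGINE_MEC2:
		return adev->gfx.mec2_fw_version;
	case KGD_ENGINE_RLC:
		return adev->gfx.rlc_fw_version;
	case KGD_ENGINE_SDMA1:
		return adev->sdma.instance[0].fw_version;
	case KGD_ENGINE_SDMA2:
		return adev->sdma.instance[1].fw_version;
	default:
		return 0;
	}
}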
404 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_local_mem_info() local
408 mem_info->local_mem_size_public = adev->gmc.visible_vram_size; in amdgpu_amdkfd_get_local_mem_info()
409 mem_info->local_mem_size_private = adev->gmc.real_vram_size - in amdgpu_amdkfd_get_local_mem_info()
410 adev->gmc.visible_vram_size; in amdgpu_amdkfd_get_local_mem_info()
412 mem_info->vram_width = adev->gmc.vram_width; in amdgpu_amdkfd_get_local_mem_info()
415 &adev->gmc.aper_base, in amdgpu_amdkfd_get_local_mem_info()
419 if (amdgpu_sriov_vf(adev)) in amdgpu_amdkfd_get_local_mem_info()
420 mem_info->mem_clk_max = adev->clock.default_mclk / 100; in amdgpu_amdkfd_get_local_mem_info()
421 else if (adev->pm.dpm_enabled) { in amdgpu_amdkfd_get_local_mem_info()
425 mem_info->mem_clk_max = amdgpu_dpm_get_mclk(adev, false) / 100; in amdgpu_amdkfd_get_local_mem_info()
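The matches at 404-425 fill the kfd_local_mem_info that KFD exposes through its topology: CPU-visible VRAM is reported as "public", the remainder as "private", and the peak memory clock comes from the virtualization defaults or from DPM. A hedged sketch (prototype, memset and the no-DPM fallback are assumed; the aper_base debug print around line 415 is elided):

void amdgpu_amdkfd_get_local_mem_info(struct kgd_dev *kgd,
				      struct kfd_local_mem_info *mem_info)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	memset(mem_info, 0, sizeof(*mem_info));

	mem_info->local_mem_size_public = adev->gmc.visible_vram_size;
	mem_info->local_mem_size_private = adev->gmc.real_vram_size -
			adev->gmc.visible_vram_size;
	mem_info->vram_width = adev->gmc.vram_width;

	/* clocks are kept in 10 kHz units, so / 100 yields MHz */
	if (amdgpu_sriov_vf(adev))
		mem_info->mem_clk_max = adev->clock.default_mclk / 100;
	else if (adev->pm.dpm_enabled)
		mem_info->mem_clk_max = amdgpu_dpm_get_mclk(adev, false) / 100;
	else
		mem_info->mem_clk_max = 100;	/* assumed placeholder */
}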
432 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_gpu_clock_counter() local
434 if (adev->gfx.funcs->get_gpu_clock_counter) in amdgpu_amdkfd_get_gpu_clock_counter()
435 return adev->gfx.funcs->get_gpu_clock_counter(adev); in amdgpu_amdkfd_get_gpu_clock_counter()
441 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_max_engine_clock_in_mhz() local
444 if (amdgpu_sriov_vf(adev)) in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
445 return adev->clock.default_sclk / 100; in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
446 else if (adev->pm.dpm_enabled) in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
447 return amdgpu_dpm_get_sclk(adev, false) / 100; in amdgpu_amdkfd_get_max_engine_clock_in_mhz()
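The two clock queries at 432-447 follow the same idea: report whatever amdgpu already knows, preferring the per-ASIC callback or DPM and falling back to the defaults read from the VBIOS under SR-IOV. A hedged sketch (return types and the final fallbacks are assumed):

uint64_t amdgpu_amdkfd_get_gpu_clock_counter(struct kgd_dev *kgd)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	if (adev->gfx.funcs->get_gpu_clock_counter)
		return adev->gfx.funcs->get_gpu_clock_counter(adev);
	return 0;
}

uint32_t amdgpu_amdkfd_get_max_engine_clock_in_mhz(struct kgd_dev *kgd)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	/* SCLK is stored in 10 kHz units, so / 100 converts to MHz */
	if (amdgpu_sriov_vf(adev))
		return adev->clock.default_sclk / 100;
	else if (adev->pm.dpm_enabled)
		return amdgpu_dpm_get_sclk(adev, false) / 100;
	else
		return 100;	/* assumed placeholder when DPM is off */
}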
454 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_cu_info() local
455 struct amdgpu_cu_info acu_info = adev->gfx.cu_info; in amdgpu_amdkfd_get_cu_info()
465 cu_info->num_shader_engines = adev->gfx.config.max_shader_engines; in amdgpu_amdkfd_get_cu_info()
466 cu_info->num_shader_arrays_per_engine = adev->gfx.config.max_sh_per_se; in amdgpu_amdkfd_get_cu_info()
467 cu_info->num_cu_per_sh = adev->gfx.config.max_cu_per_sh; in amdgpu_amdkfd_get_cu_info()
481 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_dmabuf_info() local
497 if (obj->dev->driver != adev_to_drm(adev)->driver) in amdgpu_amdkfd_get_dmabuf_info()
501 adev = drm_to_adev(obj->dev); in amdgpu_amdkfd_get_dmabuf_info()
510 *dma_buf_kgd = (struct kgd_dev *)adev; in amdgpu_amdkfd_get_dmabuf_info()
532 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_vram_usage() local
533 struct ttm_resource_manager *vram_man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM); in amdgpu_amdkfd_get_vram_usage()
540 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_hive_id() local
542 return adev->gmc.xgmi.hive_id; in amdgpu_amdkfd_get_hive_id()
547 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_unique_id() local
549 return adev->unique_id; in amdgpu_amdkfd_get_unique_id()
555 struct amdgpu_device *adev = (struct amdgpu_device *)dst; in amdgpu_amdkfd_get_xgmi_hops_count() local
556 int ret = amdgpu_xgmi_get_hops_count(adev, peer_adev); in amdgpu_amdkfd_get_xgmi_hops_count()
560 adev->gmc.xgmi.physical_node_id, in amdgpu_amdkfd_get_xgmi_hops_count()
569 struct amdgpu_device *adev = (struct amdgpu_device *)dst, *peer_adev; in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes() local
572 if (adev->asic_type != CHIP_ALDEBARAN) in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
579 num_links = is_min ? 1 : amdgpu_xgmi_get_num_links(adev, peer_adev); in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
582 adev->gmc.xgmi.physical_node_id, in amdgpu_amdkfd_get_xgmi_bandwidth_mbytes()
593 struct amdgpu_device *adev = (struct amdgpu_device *)dev; in amdgpu_amdkfd_get_pcie_bandwidth_mbytes() local
594 int num_lanes_shift = (is_min ? ffs(adev->pm.pcie_mlw_mask) : in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
595 fls(adev->pm.pcie_mlw_mask)) - 1; in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
596 int gen_speed_shift = (is_min ? ffs(adev->pm.pcie_gen_mask & in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
598 fls(adev->pm.pcie_gen_mask & in amdgpu_amdkfd_get_pcie_bandwidth_mbytes()
651 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_mmio_remap_phys_addr() local
653 return adev->rmmio_remap.bus_addr; in amdgpu_amdkfd_get_mmio_remap_phys_addr()
658 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_num_gws() local
660 return adev->gds.gws_size; in amdgpu_amdkfd_get_num_gws()
665 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_asic_rev_id() local
667 return adev->rev_id; in amdgpu_amdkfd_get_asic_rev_id()
672 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_get_noretry() local
674 return adev->gmc.noretry; in amdgpu_amdkfd_get_noretry()
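The block of matches from 651 to 674 consists of one-line getters that all share the same shape: cast the kgd handle back to amdgpu_device and return a single cached field. Two of them, reconstructed as an example (return types assumed):

uint64_t amdgpu_amdkfd_get_mmio_remap_phys_addr(struct kgd_dev *kgd)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	return adev->rmmio_remap.bus_addr;
}

uint32_t amdgpu_amdkfd_get_num_gws(struct kgd_dev *kgd)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	return adev->gds.gws_size;
}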
681 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_submit_ib() local
690 ring = &adev->gfx.compute_ring[0]; in amdgpu_amdkfd_submit_ib()
693 ring = &adev->sdma.instance[0].ring; in amdgpu_amdkfd_submit_ib()
696 ring = &adev->sdma.instance[1].ring; in amdgpu_amdkfd_submit_ib()
704 ret = amdgpu_job_alloc(adev, 1, &job, NULL); in amdgpu_amdkfd_submit_ib()
734 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_set_compute_idle() local
736 amdgpu_dpm_switch_power_profile(adev, in amdgpu_amdkfd_set_compute_idle()
741 bool amdgpu_amdkfd_is_kfd_vmid(struct amdgpu_device *adev, u32 vmid) in amdgpu_amdkfd_is_kfd_vmid() argument
743 if (adev->kfd.dev) in amdgpu_amdkfd_is_kfd_vmid()
744 return vmid >= adev->vm_manager.first_kfd_vmid; in amdgpu_amdkfd_is_kfd_vmid()
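amdgpu_amdkfd_is_kfd_vmid() at 741-744 is how the rest of amdgpu tells KFD-owned VMIDs apart from its own: everything at or above first_kfd_vmid belongs to KFD once the KFD device exists. Reconstructed from the matches (the false fallback is assumed):

bool amdgpu_amdkfd_is_kfd_vmid(struct amdgpu_device *adev, u32 vmid)
{
	if (adev->kfd.dev)
		return vmid >= adev->vm_manager.first_kfd_vmid;

	return false;
}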
751 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_flush_gpu_tlb_vmid() local
753 if (adev->family == AMDGPU_FAMILY_AI) { in amdgpu_amdkfd_flush_gpu_tlb_vmid()
756 for (i = 0; i < adev->num_vmhubs; i++) in amdgpu_amdkfd_flush_gpu_tlb_vmid()
757 amdgpu_gmc_flush_gpu_tlb(adev, vmid, i, 0); in amdgpu_amdkfd_flush_gpu_tlb_vmid()
759 amdgpu_gmc_flush_gpu_tlb(adev, vmid, AMDGPU_GFXHUB_0, 0); in amdgpu_amdkfd_flush_gpu_tlb_vmid()
768 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_flush_gpu_tlb_pasid() local
771 if (adev->family == AMDGPU_FAMILY_AI || in amdgpu_amdkfd_flush_gpu_tlb_pasid()
772 adev->family == AMDGPU_FAMILY_RV) in amdgpu_amdkfd_flush_gpu_tlb_pasid()
775 return amdgpu_gmc_flush_gpu_tlb_pasid(adev, pasid, flush_type, all_hub); in amdgpu_amdkfd_flush_gpu_tlb_pasid()
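The TLB helpers at 751-775 account for the split GFX/MM hubs on Vega-class (AMDGPU_FAMILY_AI) parts: the per-VMID flush loops over every VM hub there, while older ASICs only flush the GFX hub, and the PASID flush sets all_hub for Vega and Raven before delegating to the GMC layer. A hedged sketch (the pasid wrapper's flush_type parameter type and the return plumbing are assumptions):

int amdgpu_amdkfd_flush_gpu_tlb_vmid(struct kgd_dev *kgd, uint16_t vmid)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;

	if (adev->family == AMDGPU_FAMILY_AI) {
		int i;

		/* Vega has separate GFX and MM hubs; flush them all */
		for (i = 0; i < adev->num_vmhubs; i++)
			amdgpu_gmc_flush_gpu_tlb(adev, vmid, i, 0);
	} else {
		amdgpu_gmc_flush_gpu_tlb(adev, vmid, AMDGPU_GFXHUB_0, 0);
	}

	return 0;
}

int amdgpu_amdkfd_flush_gpu_tlb_pasid(struct kgd_dev *kgd, uint16_t pasid,
				      uint32_t flush_type)
{
	struct amdgpu_device *adev = (struct amdgpu_device *)kgd;
	bool all_hub = false;

	if (adev->family == AMDGPU_FAMILY_AI ||
	    adev->family == AMDGPU_FAMILY_RV)
		all_hub = true;

	return amdgpu_gmc_flush_gpu_tlb_pasid(adev, pasid, flush_type, all_hub);
}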
780 struct amdgpu_device *adev = (struct amdgpu_device *)kgd; in amdgpu_amdkfd_have_atomics_support() local
782 return adev->have_atomics_support; in amdgpu_amdkfd_have_atomics_support()