
Searched refs:kfd (Results 1 – 22 of 22) sorted by relevance

/drivers/gpu/drm/amd/amdkfd/
kfd_device.c
527 static int kfd_gtt_sa_init(struct kfd_dev *kfd, unsigned int buf_size,
529 static void kfd_gtt_sa_fini(struct kfd_dev *kfd);
531 static int kfd_resume(struct kfd_dev *kfd);
536 struct kfd_dev *kfd; in kgd2kfd_probe() local
555 kfd = kzalloc(sizeof(*kfd), GFP_KERNEL); in kgd2kfd_probe()
556 if (!kfd) in kgd2kfd_probe()
563 kfd->pci_atomic_requested = amdgpu_amdkfd_have_atomics_support(kgd); in kgd2kfd_probe()
565 !kfd->pci_atomic_requested) { in kgd2kfd_probe()
569 kfree(kfd); in kgd2kfd_probe()
573 kfd->kgd = kgd; in kgd2kfd_probe()
[all …]
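
The probe path above follows a common kernel idiom: zero-allocate the device structure, verify a hard requirement (here, PCIe atomics), and free on the bail-out path before anything else can reference the allocation. A minimal user-space sketch of that shape; all names here are illustrative stand-ins, not driver symbols:

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

/* Stand-in for struct kfd_dev; fields are illustrative only. */
struct fake_kfd_dev {
	bool pci_atomic_requested;
	void *kgd;
};

static struct fake_kfd_dev *probe_sketch(void *kgd, bool have_atomics,
					 bool need_atomics)
{
	struct fake_kfd_dev *kfd = calloc(1, sizeof(*kfd)); /* kzalloc() analogue */

	if (!kfd)
		return NULL;

	kfd->pci_atomic_requested = have_atomics;
	if (need_atomics && !kfd->pci_atomic_requested) {
		free(kfd);	/* same bail-out as the kfree() above */
		return NULL;
	}

	kfd->kgd = kgd;	/* link back to the kgd only once checks pass */
	return kfd;
}

int main(void)
{
	int dummy_kgd;
	struct fake_kfd_dev *kfd = probe_sketch(&dummy_kgd, false, true);

	printf("probe %s\n", kfd ? "succeeded" : "rejected: atomics required");
	free(kfd);
	return 0;
}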
kfd_doorbell.c
49 size_t kfd_doorbell_process_slice(struct kfd_dev *kfd) in kfd_doorbell_process_slice() argument
51 return roundup(kfd->device_info->doorbell_size * in kfd_doorbell_process_slice()
57 int kfd_doorbell_init(struct kfd_dev *kfd) in kfd_doorbell_init() argument
70 roundup(kfd->shared_resources.doorbell_start_offset, in kfd_doorbell_init()
71 kfd_doorbell_process_slice(kfd)); in kfd_doorbell_init()
74 rounddown(kfd->shared_resources.doorbell_aperture_size, in kfd_doorbell_init()
75 kfd_doorbell_process_slice(kfd)); in kfd_doorbell_init()
80 kfd_doorbell_process_slice(kfd); in kfd_doorbell_init()
84 if (!kfd->max_doorbell_slices || in kfd_doorbell_init()
85 doorbell_process_limit < kfd->max_doorbell_slices) in kfd_doorbell_init()
[all …]
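
kfd_doorbell_init() partitions the doorbell aperture into equal per-process slices: the start offset is rounded up and the aperture size rounded down to a slice boundary, and the quotient bounds how many processes can get doorbell pages. A runnable sketch of that arithmetic with made-up sizes:

#include <stdio.h>
#include <stddef.h>

/* User-space stand-ins for the kernel's roundup()/rounddown() macros. */
#define ROUNDUP(x, y)	((((x) + (y) - 1) / (y)) * (y))
#define ROUNDDOWN(x, y)	(((x) / (y)) * (y))

int main(void)
{
	size_t slice = 8 * 4096;	/* kfd_doorbell_process_slice(): assumed value */
	size_t start_offset = 12288;	/* doorbell_start_offset: assumed */
	size_t aperture = 2 * 1024 * 1024; /* doorbell_aperture_size: assumed */

	size_t start = ROUNDUP(start_offset, slice);	/* first whole slice */
	size_t size = ROUNDDOWN(aperture, slice);	/* drop the partial tail */
	size_t process_limit = (size - start) / slice;	/* slices left for processes */

	printf("start=%zu usable=%zu process_limit=%zu\n",
	       start, size - start, process_limit);
	return 0;
}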
kfd_interrupt.c
52 int kfd_interrupt_init(struct kfd_dev *kfd) in kfd_interrupt_init() argument
56 r = kfifo_alloc(&kfd->ih_fifo, in kfd_interrupt_init()
57 KFD_IH_NUM_ENTRIES * kfd->device_info->ih_ring_entry_size, in kfd_interrupt_init()
64 kfd->ih_wq = alloc_workqueue("KFD IH", WQ_HIGHPRI, 1); in kfd_interrupt_init()
65 if (unlikely(!kfd->ih_wq)) { in kfd_interrupt_init()
66 kfifo_free(&kfd->ih_fifo); in kfd_interrupt_init()
70 spin_lock_init(&kfd->interrupt_lock); in kfd_interrupt_init()
72 INIT_WORK(&kfd->interrupt_work, interrupt_wq); in kfd_interrupt_init()
74 kfd->interrupts_active = true; in kfd_interrupt_init()
86 void kfd_interrupt_exit(struct kfd_dev *kfd) in kfd_interrupt_exit() argument
[all …]
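
kfd_interrupt_init() acquires two resources in order, the IH kfifo and then a high-priority workqueue, and releases the kfifo if the workqueue allocation fails. The same unwind shape in plain C, with malloc() standing in for both allocators:

#include <stdlib.h>

/* Simplified stand-in for the interrupt state kept in struct kfd_dev. */
struct fake_ih_state {
	void *fifo;	/* kfifo_alloc() analogue */
	void *wq;	/* alloc_workqueue() analogue */
};

static int interrupt_init_sketch(struct fake_ih_state *ih, size_t fifo_bytes)
{
	ih->fifo = malloc(fifo_bytes);
	if (!ih->fifo)
		return -1;

	ih->wq = malloc(1);	/* pretend workqueue allocation */
	if (!ih->wq) {
		free(ih->fifo);	/* unwind resource #1, like kfifo_free() above */
		ih->fifo = NULL;
		return -1;
	}
	return 0;	/* both resources live; interrupts may be enabled */
}

int main(void)
{
	struct fake_ih_state ih;

	if (interrupt_init_sketch(&ih, 8 * 1024))
		return 1;
	free(ih.fifo);
	free(ih.wq);
	return 0;
}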
kfd_iommu.c
43 int kfd_iommu_check_device(struct kfd_dev *kfd) in kfd_iommu_check_device() argument
48 if (!kfd->use_iommu_v2) in kfd_iommu_check_device()
52 err = amd_iommu_device_info(kfd->pdev, &iommu_info); in kfd_iommu_check_device()
64 int kfd_iommu_device_init(struct kfd_dev *kfd) in kfd_iommu_device_init() argument
70 if (!kfd->use_iommu_v2) in kfd_iommu_device_init()
74 err = amd_iommu_device_info(kfd->pdev, &iommu_info); in kfd_iommu_device_init()
92 (unsigned int)(1 << kfd->device_info->max_pasid_bits), in kfd_iommu_device_init()
218 static int kfd_bind_processes_to_device(struct kfd_dev *kfd) in kfd_bind_processes_to_device() argument
229 pdd = kfd_get_process_device_data(kfd, p); in kfd_bind_processes_to_device()
236 err = amd_iommu_bind_pasid(kfd->pdev, p->pasid, in kfd_bind_processes_to_device()
[all …]
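
kfd_bind_processes_to_device() re-attaches every known process to the IOMMU after a resume: iterate the process list, fetch the per-device data, and bind each PASID. The same loop shape in a standalone sketch (types and the bind helper are hypothetical, and error handling is simplified to an early return):

#include <stdio.h>

struct fake_process {
	unsigned int pasid;
};

/* Stand-in for amd_iommu_bind_pasid(); always succeeds here. */
static int bind_pasid_sketch(unsigned int pasid)
{
	printf("rebinding pasid %u\n", pasid);
	return 0;
}

static int bind_processes_sketch(struct fake_process *procs, size_t n)
{
	for (size_t i = 0; i < n; i++) {
		int err = bind_pasid_sketch(procs[i].pasid);

		if (err)
			return err;	/* stop on the first failed bind */
	}
	return 0;
}

int main(void)
{
	struct fake_process procs[] = { { 1 }, { 2 }, { 3 } };

	return bind_processes_sketch(procs, 3);
}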
kfd_iommu.h
32 int kfd_iommu_check_device(struct kfd_dev *kfd);
33 int kfd_iommu_device_init(struct kfd_dev *kfd);
38 void kfd_iommu_suspend(struct kfd_dev *kfd);
39 int kfd_iommu_resume(struct kfd_dev *kfd);
45 static inline int kfd_iommu_check_device(struct kfd_dev *kfd) in kfd_iommu_check_device() argument
49 static inline int kfd_iommu_device_init(struct kfd_dev *kfd) in kfd_iommu_device_init() argument
67 static inline void kfd_iommu_suspend(struct kfd_dev *kfd) in kfd_iommu_suspend() argument
71 static inline int kfd_iommu_resume(struct kfd_dev *kfd) in kfd_iommu_resume() argument
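
Note the compile-out pattern in kfd_iommu.h: with IOMMUv2 support disabled, the same symbols become static inline no-ops, so callers need no #ifdef guards. A compile-only sketch of that shape; the config symbol and stub return values are assumptions:

#include <errno.h>

struct kfd_dev;	/* opaque to this header */

#if defined(CONFIG_AMD_IOMMU_V2)
int kfd_iommu_check_device(struct kfd_dev *kfd);
int kfd_iommu_device_init(struct kfd_dev *kfd);
void kfd_iommu_suspend(struct kfd_dev *kfd);
int kfd_iommu_resume(struct kfd_dev *kfd);
#else
/* Compiled-out stubs: callers can call these unconditionally. */
static inline int kfd_iommu_check_device(struct kfd_dev *kfd)
{
	return -ENODEV;	/* assumed error code */
}
static inline int kfd_iommu_device_init(struct kfd_dev *kfd)
{
	return 0;
}
static inline void kfd_iommu_suspend(struct kfd_dev *kfd)
{
}
static inline int kfd_iommu_resume(struct kfd_dev *kfd)
{
	return 0;
}
#endif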
kfd_priv.h
890 size_t kfd_doorbell_process_slice(struct kfd_dev *kfd);
891 int kfd_doorbell_init(struct kfd_dev *kfd);
892 void kfd_doorbell_fini(struct kfd_dev *kfd);
895 void __iomem *kfd_get_kernel_doorbell(struct kfd_dev *kfd,
897 void kfd_release_kernel_doorbell(struct kfd_dev *kfd, u32 __iomem *db_addr);
901 unsigned int kfd_get_doorbell_dw_offset_in_bar(struct kfd_dev *kfd,
905 int kfd_alloc_process_doorbells(struct kfd_dev *kfd,
907 void kfd_free_process_doorbells(struct kfd_dev *kfd,
911 int kfd_gtt_sa_allocate(struct kfd_dev *kfd, unsigned int size,
914 int kfd_gtt_sa_free(struct kfd_dev *kfd, struct kfd_mem_obj *mem_obj);
[all …]
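
The kfd_gtt_sa_* declarations above form a sub-allocator: kfd_gtt_sa_init() carves one GTT buffer into fixed-size chunks that kfd_gtt_sa_allocate() and kfd_gtt_sa_free() hand out and reclaim. A toy user-space analogue, limited to single-chunk allocations and no locking:

#include <stdbool.h>
#include <stdlib.h>

struct sa_pool {
	char *base;		/* the one big buffer */
	bool *used;		/* one flag per chunk */
	size_t chunk_size;
	size_t nchunks;
};

static int sa_init(struct sa_pool *p, size_t buf_size, size_t chunk_size)
{
	p->base = malloc(buf_size);
	p->used = calloc(buf_size / chunk_size, sizeof(*p->used));
	if (!p->base || !p->used) {
		free(p->base);
		free(p->used);
		return -1;
	}
	p->chunk_size = chunk_size;
	p->nchunks = buf_size / chunk_size;
	return 0;
}

static void *sa_allocate(struct sa_pool *p)
{
	for (size_t i = 0; i < p->nchunks; i++) {
		if (!p->used[i]) {
			p->used[i] = true;
			return p->base + i * p->chunk_size;
		}
	}
	return NULL;	/* pool exhausted */
}

static void sa_free(struct sa_pool *p, void *ptr)
{
	p->used[((char *)ptr - p->base) / p->chunk_size] = false;
}

int main(void)
{
	struct sa_pool pool;
	void *a, *b;

	if (sa_init(&pool, 1024, 64))
		return 1;
	a = sa_allocate(&pool);
	b = sa_allocate(&pool);
	sa_free(&pool, a);	/* chunk becomes reusable */
	(void)b;
	return 0;
}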
kfd_mqd_manager_v9.c
84 static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd, in allocate_mqd() argument
106 if (kfd->cwsr_enabled && (q->type == KFD_QUEUE_TYPE_COMPUTE)) { in allocate_mqd()
110 retval = amdgpu_amdkfd_alloc_gtt_mem(kfd->kgd, in allocate_mqd()
122 retval = kfd_gtt_sa_allocate(kfd, sizeof(struct v9_mqd), in allocate_mqd()
293 struct kfd_dev *kfd = mm->dev; in free_mqd() local
296 amdgpu_amdkfd_free_gtt_mem(kfd->kgd, mqd_mem_obj->gtt_mem); in free_mqd()
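
In the v9 allocate_mqd() above, a CWSR-enabled compute queue gets its MQD from a dedicated GTT allocation (amdgpu_amdkfd_alloc_gtt_mem()), while every other queue is served from the kfd_gtt_sa sub-allocator. The branch reduced to a runnable stub; the queue-type check is simplified:

#include <stdbool.h>
#include <stdio.h>

enum mqd_backend { MQD_FROM_GTT, MQD_FROM_SUB_ALLOCATOR };

/* Mirrors the branch in allocate_mqd(); CWSR compute queues get a
 * dedicated buffer, everything else uses the cheaper sub-allocator. */
static enum mqd_backend pick_backend(bool cwsr_enabled, bool is_compute_queue)
{
	if (cwsr_enabled && is_compute_queue)
		return MQD_FROM_GTT;
	return MQD_FROM_SUB_ALLOCATOR;
}

int main(void)
{
	printf("%s\n", pick_backend(true, true) == MQD_FROM_GTT
		       ? "GTT" : "sub-allocator");
	return 0;
}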
kfd_mqd_manager.h
70 struct kfd_mem_obj* (*allocate_mqd)(struct kfd_dev *kfd,
kfd_packet_manager_vi.c
79 struct kfd_dev *kfd = pm->dqm->dev; in pm_runlist_vi() local
94 kfd->max_proc_per_quantum); in pm_runlist_vi()
kfd_packet_manager_v9.c
82 struct kfd_dev *kfd = pm->dqm->dev; in pm_runlist_v9() local
94 kfd->max_proc_per_quantum); in pm_runlist_v9()
kfd_mqd_manager_cik.c
75 static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd, in allocate_mqd() argument
80 if (kfd_gtt_sa_allocate(kfd, sizeof(struct cik_mqd), in allocate_mqd()
kfd_mqd_manager_v10.c
75 static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd, in allocate_mqd() argument
80 if (kfd_gtt_sa_allocate(kfd, sizeof(struct v10_compute_mqd), in allocate_mqd()
kfd_mqd_manager_vi.c
78 static struct kfd_mem_obj *allocate_mqd(struct kfd_dev *kfd, in allocate_mqd() argument
83 if (kfd_gtt_sa_allocate(kfd, sizeof(struct vi_mqd), in allocate_mqd()
kfd_chardev.c
1030 struct kfd_dev *kfd; in kfd_ioctl_create_event() local
1040 kfd = kfd_device_by_id(GET_GPU_ID(args->event_page_offset)); in kfd_ioctl_create_event()
1041 if (!kfd) { in kfd_ioctl_create_event()
1047 pdd = kfd_bind_process_to_device(kfd, p); in kfd_ioctl_create_event()
1063 err = amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel(kfd->kgd, in kfd_ioctl_create_event()
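
kfd_ioctl_create_event() finds the owning device by decoding a GPU id that was packed into the event-page mmap offset (the GET_GPU_ID() macro above). A sketch of that style of decoding; the shift and mask below are placeholders, not the driver's actual bit layout:

#include <stdint.h>
#include <stdio.h>

/* Placeholder bit layout; NOT the driver's actual encoding. */
#define SKETCH_GPU_ID_SHIFT	46
#define SKETCH_GPU_ID_MASK	0xffffULL

static uint32_t get_gpu_id_sketch(uint64_t event_page_offset)
{
	return (uint32_t)((event_page_offset >> SKETCH_GPU_ID_SHIFT) &
			  SKETCH_GPU_ID_MASK);
}

int main(void)
{
	uint64_t off = (uint64_t)0x1234 << SKETCH_GPU_ID_SHIFT;

	printf("gpu_id=0x%x\n", (unsigned int)get_gpu_id_sketch(off));	/* 0x1234 */
	return 0;
}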
/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd.h
304 bool kgd2kfd_device_init(struct kfd_dev *kfd,
307 void kgd2kfd_device_exit(struct kfd_dev *kfd);
308 void kgd2kfd_suspend(struct kfd_dev *kfd, bool run_pm);
309 int kgd2kfd_resume_iommu(struct kfd_dev *kfd);
310 int kgd2kfd_resume(struct kfd_dev *kfd, bool run_pm);
311 int kgd2kfd_pre_reset(struct kfd_dev *kfd);
312 int kgd2kfd_post_reset(struct kfd_dev *kfd);
313 void kgd2kfd_interrupt(struct kfd_dev *kfd, const void *ih_ring_entry);
314 void kgd2kfd_set_sram_ecc_flag(struct kfd_dev *kfd);
315 void kgd2kfd_smi_event_throttle(struct kfd_dev *kfd, uint32_t throttle_bitmask);
[all …]
amdgpu_amdkfd.c
72 adev->kfd.dev = kgd2kfd_probe((struct kgd_dev *)adev, in amdgpu_amdkfd_device_probe()
75 if (adev->kfd.dev) in amdgpu_amdkfd_device_probe()
117 if (adev->kfd.dev) { in amdgpu_amdkfd_device_init()
168 kgd2kfd_device_init(adev->kfd.dev, adev_to_drm(adev), &gpu_resources); in amdgpu_amdkfd_device_init()
174 if (adev->kfd.dev) { in amdgpu_amdkfd_device_fini()
175 kgd2kfd_device_exit(adev->kfd.dev); in amdgpu_amdkfd_device_fini()
176 adev->kfd.dev = NULL; in amdgpu_amdkfd_device_fini()
183 if (adev->kfd.dev) in amdgpu_amdkfd_interrupt()
184 kgd2kfd_interrupt(adev->kfd.dev, ih_ring_entry); in amdgpu_amdkfd_interrupt()
189 if (adev->kfd.dev) in amdgpu_amdkfd_suspend()
[all …]
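
Every amdgpu_amdkfd_* wrapper above guards on adev->kfd.dev before forwarding into KFD, since kgd2kfd_probe() may have failed or KFD may be disabled. The guard reduced to a standalone sketch with simplified types:

#include <stdio.h>

/* Simplified stand-ins for struct amdgpu_device and its kfd member. */
struct fake_adev {
	struct {
		void *dev;	/* NULL when KFD never probed */
	} kfd;
};

static void amdkfd_interrupt_sketch(struct fake_adev *adev, const void *entry)
{
	if (adev->kfd.dev)	/* every wrapper checks this first */
		printf("forwarding IH entry %p to KFD\n", entry);
}

int main(void)
{
	struct fake_adev adev = { { 0 } };
	int entry;

	amdkfd_interrupt_sketch(&adev, &entry);	/* silently dropped: no KFD */
	adev.kfd.dev = &adev;			/* pretend probe attached KFD */
	amdkfd_interrupt_sketch(&adev, &entry);
	return 0;
}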
amdgpu_umc.c
97 kgd2kfd_set_sram_ecc_flag(adev->kfd.dev); in amdgpu_umc_process_ras_data_cb()
amdgpu_sdma.c
162 kgd2kfd_set_sram_ecc_flag(adev->kfd.dev); in amdgpu_sdma_process_ras_data_cb()
amdgpu_amdkfd_gpuvm.c
153 (adev->kfd.vram_used + vram_needed > in amdgpu_amdkfd_reserve_mem_limit()
159 adev->kfd.vram_used += vram_needed; in amdgpu_amdkfd_reserve_mem_limit()
185 adev->kfd.vram_used -= size; in unreserve_mem_limit()
186 WARN_ONCE(adev->kfd.vram_used < 0, in unreserve_mem_limit()
amdgpu_gfx.c
679 kgd2kfd_set_sram_ecc_flag(adev->kfd.dev); in amdgpu_gfx_process_ras_data_cb()
amdgpu.h
917 struct amdgpu_kfd_dev kfd; member
/drivers/gpu/drm/amd/pm/swsmu/smu11/
arcturus_ppt.c
2252 kgd2kfd_smi_event_throttle(smu->adev->kfd.dev, throttler_status); in arcturus_log_thermal_throttling_event()