Searched refs:adev (Results 1 – 25 of 752) sorted by relevance

/drivers/gpu/drm/amd/amdgpu/
amdgpu_device.c
157 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_pcie_replay_count() local
158 uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev); in amdgpu_device_get_pcie_replay_count()
166 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev);
179 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_px() local
181 if ((adev->flags & AMD_IS_PX) && !amdgpu_is_atpx_hybrid()) in amdgpu_device_supports_px()
196 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_boco() local
198 if (adev->has_pr3 || in amdgpu_device_supports_boco()
199 ((adev->flags & AMD_IS_PX) && amdgpu_is_atpx_hybrid())) in amdgpu_device_supports_boco()
214 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_baco() local
216 return amdgpu_asic_supports_baco(adev); in amdgpu_device_supports_baco()
[all …]
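
The three helpers above encode amdgpu's runtime power-off capability checks: PX needs the AMD_IS_PX flag on a non-hybrid ATPX platform, BOCO needs PR3 support or hybrid ATPX on a PX platform, and BACO is delegated to the ASIC-specific callback. A minimal standalone sketch of that logic follows; struct dev_caps and its fields are illustrative stand-ins for adev and the helpers the driver calls, not the driver's types.

#include <stdbool.h>
#include <stdio.h>

#define AMD_IS_PX (1u << 0)

struct dev_caps {
	unsigned int flags;  /* cf. adev->flags */
	bool has_pr3;        /* cf. adev->has_pr3 */
	bool atpx_hybrid;    /* cf. amdgpu_is_atpx_hybrid() */
	bool asic_baco;      /* cf. amdgpu_asic_supports_baco() */
};

static bool supports_px(const struct dev_caps *c)
{
	/* ATPX power control, but only in non-hybrid mode */
	return (c->flags & AMD_IS_PX) && !c->atpx_hybrid;
}

static bool supports_boco(const struct dev_caps *c)
{
	/* PR3, or hybrid ATPX on a PX platform */
	return c->has_pr3 || ((c->flags & AMD_IS_PX) && c->atpx_hybrid);
}

static bool supports_baco(const struct dev_caps *c)
{
	return c->asic_baco;  /* delegated to the ASIC callback */
}

int main(void)
{
	struct dev_caps c = { .flags = AMD_IS_PX, .atpx_hybrid = true };

	printf("px=%d boco=%d baco=%d\n", supports_px(&c),
	       supports_boco(&c), supports_baco(&c));
	return 0;
}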
gmc_v9_0.c
415 static int gmc_v9_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v9_0_ecc_interrupt_state() argument
425 if (adev->asic_type >= CHIP_VEGA20) in gmc_v9_0_ecc_interrupt_state()
466 static int gmc_v9_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v9_0_vm_fault_interrupt_state() argument
484 for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) { in gmc_v9_0_vm_fault_interrupt_state()
485 hub = &adev->vmhub[j]; in gmc_v9_0_vm_fault_interrupt_state()
494 if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0))) in gmc_v9_0_vm_fault_interrupt_state()
512 for_each_set_bit(j, adev->vmhubs_mask, AMDGPU_MAX_VMHUBS) { in gmc_v9_0_vm_fault_interrupt_state()
513 hub = &adev->vmhub[j]; in gmc_v9_0_vm_fault_interrupt_state()
522 if (adev->in_s0ix && (j == AMDGPU_GFXHUB(0))) in gmc_v9_0_vm_fault_interrupt_state()
546 static int gmc_v9_0_process_interrupt(struct amdgpu_device *adev, in gmc_v9_0_process_interrupt() argument
[all …]
gmc_v10_0.c
56 static int gmc_v10_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v10_0_ecc_interrupt_state() argument
65 gmc_v10_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v10_0_vm_fault_interrupt_state() argument
72 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false); in gmc_v10_0_vm_fault_interrupt_state()
79 if (!adev->in_s0ix) in gmc_v10_0_vm_fault_interrupt_state()
80 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false); in gmc_v10_0_vm_fault_interrupt_state()
84 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true); in gmc_v10_0_vm_fault_interrupt_state()
91 if (!adev->in_s0ix) in gmc_v10_0_vm_fault_interrupt_state()
92 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true); in gmc_v10_0_vm_fault_interrupt_state()
101 static int gmc_v10_0_process_interrupt(struct amdgpu_device *adev, in gmc_v10_0_process_interrupt() argument
107 struct amdgpu_vmhub *hub = &adev->vmhub[vmhub_index]; in gmc_v10_0_process_interrupt()
[all …]
gmc_v11_0.c
51 static int gmc_v11_0_ecc_interrupt_state(struct amdgpu_device *adev, in gmc_v11_0_ecc_interrupt_state() argument
60 gmc_v11_0_vm_fault_interrupt_state(struct amdgpu_device *adev, in gmc_v11_0_vm_fault_interrupt_state() argument
67 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), false); in gmc_v11_0_vm_fault_interrupt_state()
74 if (!adev->in_s0ix) in gmc_v11_0_vm_fault_interrupt_state()
75 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), false); in gmc_v11_0_vm_fault_interrupt_state()
79 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_MMHUB0(0), true); in gmc_v11_0_vm_fault_interrupt_state()
86 if (!adev->in_s0ix) in gmc_v11_0_vm_fault_interrupt_state()
87 amdgpu_gmc_set_vm_fault_masks(adev, AMDGPU_GFXHUB(0), true); in gmc_v11_0_vm_fault_interrupt_state()
96 static int gmc_v11_0_process_interrupt(struct amdgpu_device *adev, in gmc_v11_0_process_interrupt() argument
102 struct amdgpu_vmhub *hub = &adev->vmhub[vmhub_index]; in gmc_v11_0_process_interrupt()
[all …]
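
gmc_v9_0, gmc_v10_0 and gmc_v11_0 all follow the same rule in their vm_fault_interrupt_state handlers: toggle fault-interrupt masking per VM hub, but leave AMDGPU_GFXHUB(0) alone while adev->in_s0ix is set, since the GFX block is powered off across S0ix and its hub registers must not be touched. A compile-checkable sketch of the gmc_v9_0-style loop, with stub types (struct hub_state, MAX_VMHUBS and gfx_hub_id are illustrative):

#include <stdbool.h>
#include <stdio.h>

#define MAX_VMHUBS 8

struct hub_state {
	unsigned long vmhubs_mask;  /* which hubs exist, cf. adev->vmhubs_mask */
	bool in_s0ix;               /* cf. adev->in_s0ix */
	int gfx_hub_id;             /* cf. AMDGPU_GFXHUB(0) */
};

static void set_fault_masks(const struct hub_state *s, bool enable)
{
	for (int j = 0; j < MAX_VMHUBS; j++) {
		if (!(s->vmhubs_mask & (1ul << j)))
			continue;
		/* GFX hub registers are inaccessible during S0ix suspend */
		if (s->in_s0ix && j == s->gfx_hub_id)
			continue;
		printf("hub %d: fault interrupts %s\n", j,
		       enable ? "enabled" : "disabled");
	}
}

int main(void)
{
	struct hub_state s = { .vmhubs_mask = 0x3, .in_s0ix = true };

	set_fault_masks(&s, false);
	return 0;
}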
soc15.c
174 static int soc15_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc15_query_video_codecs() argument
177 if (adev->ip_versions[VCE_HWIP][0]) { in soc15_query_video_codecs()
178 switch (adev->ip_versions[VCE_HWIP][0]) { in soc15_query_video_codecs()
190 switch (adev->ip_versions[UVD_HWIP][0]) { in soc15_query_video_codecs()
218 static u32 soc15_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg) in soc15_uvd_ctx_rreg() argument
226 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
229 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
233 static void soc15_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_uvd_ctx_wreg() argument
240 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
243 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
[all …]
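
soc15_uvd_ctx_rreg/wreg show the driver's indexed-register idiom: one shared INDEX/DATA register pair, so the index write and the data access must happen atomically under a dedicated spinlock (uvd_ctx_idx_lock here; soc21's and nv's didt_idx_lock and vi.c's pcie_idx_lock/smc_idx_lock below guard the same pattern). A userspace sketch of the shape, with a pthread mutex in place of spin_lock_irqsave and placeholder offsets/accessors instead of the real UVD_CTX registers:

#include <stdint.h>
#include <pthread.h>

#define CTX_INDEX 0x0
#define CTX_DATA  0x4

static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;
static uint32_t fake_regs[2];

static void     mmio_write(uint32_t off, uint32_t v) { fake_regs[off / 4] = v; }
static uint32_t mmio_read(uint32_t off)              { return fake_regs[off / 4]; }

static uint32_t ctx_rreg(uint32_t reg)
{
	uint32_t v;

	pthread_mutex_lock(&idx_lock);
	mmio_write(CTX_INDEX, reg);  /* select the indirect register */
	v = mmio_read(CTX_DATA);     /* then read it through DATA */
	pthread_mutex_unlock(&idx_lock);
	return v;
}

static void ctx_wreg(uint32_t reg, uint32_t v)
{
	pthread_mutex_lock(&idx_lock);
	mmio_write(CTX_INDEX, reg);
	mmio_write(CTX_DATA, v);
	pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
	ctx_wreg(0x10, 7);
	return ctx_rreg(0x10) == 7 ? 0 : 1;
}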
soc21.c
150 static int soc21_query_video_codecs(struct amdgpu_device *adev, bool encode, in soc21_query_video_codecs() argument
153 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in soc21_query_video_codecs()
156 switch (adev->ip_versions[UVD_HWIP][0]) { in soc21_query_video_codecs()
160 if (amdgpu_sriov_vf(adev)) { in soc21_query_video_codecs()
161 if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) || in soc21_query_video_codecs()
162 !amdgpu_sriov_is_av1_support(adev)) { in soc21_query_video_codecs()
174 if ((adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0)) { in soc21_query_video_codecs()
192 static u32 soc21_didt_rreg(struct amdgpu_device *adev, u32 reg) in soc21_didt_rreg() argument
200 spin_lock_irqsave(&adev->didt_idx_lock, flags); in soc21_didt_rreg()
203 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in soc21_didt_rreg()
[all …]
amdgpu_discovery.c
209 static int amdgpu_discovery_read_binary_from_sysmem(struct amdgpu_device *adev, uint8_t *binary) in amdgpu_discovery_read_binary_from_sysmem() argument
215 ret = amdgpu_acpi_get_tmr_info(adev, &tmr_offset, &tmr_size); in amdgpu_discovery_read_binary_from_sysmem()
222 discv_regn = memremap(pos, adev->mman.discovery_tmr_size, MEMREMAP_WC); in amdgpu_discovery_read_binary_from_sysmem()
224 memcpy(binary, discv_regn, adev->mman.discovery_tmr_size); in amdgpu_discovery_read_binary_from_sysmem()
232 static int amdgpu_discovery_read_binary_from_mem(struct amdgpu_device *adev, in amdgpu_discovery_read_binary_from_mem() argument
246 if (dev_is_removable(&adev->pdev->dev)) { in amdgpu_discovery_read_binary_from_mem()
258 amdgpu_device_vram_access(adev, pos, (uint32_t *)binary, in amdgpu_discovery_read_binary_from_mem()
259 adev->mman.discovery_tmr_size, false); in amdgpu_discovery_read_binary_from_mem()
261 ret = amdgpu_discovery_read_binary_from_sysmem(adev, binary); in amdgpu_discovery_read_binary_from_mem()
267 static int amdgpu_discovery_read_binary_from_file(struct amdgpu_device *adev, uint8_t *binary) in amdgpu_discovery_read_binary_from_file() argument
[all …]
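
amdgpu_discovery_read_binary_from_mem layers its sources: read the IP-discovery TMR out of VRAM with amdgpu_device_vram_access, but use the ACPI-described system-memory copy (memremap plus memcpy in read_binary_from_sysmem) for removable devices or when the VRAM path fails; a separate helper can load the table from a file. A sketch of that fallback shape only, under the assumption that the visible checks compose this way; read_from_vram/read_from_sysmem are hypothetical stand-ins:

#include <stdbool.h>
#include <errno.h>

static int read_from_vram(void *binary)   { (void)binary; return -EIO; }
static int read_from_sysmem(void *binary) { (void)binary; return 0; }

static int read_discovery_binary(void *binary, bool removable)
{
	int ret = -EINVAL;

	/* Removable devices may lose VRAM contents across hot-plug,
	 * so prefer the ACPI-reported system-memory copy there. */
	if (!removable)
		ret = read_from_vram(binary);
	if (ret)
		ret = read_from_sysmem(binary);
	return ret;
}

int main(void)
{
	char buf[16];

	return read_discovery_binary(buf, false);
}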
nv.c
211 static int nv_query_video_codecs(struct amdgpu_device *adev, bool encode, in nv_query_video_codecs() argument
214 if (adev->vcn.num_vcn_inst == hweight8(adev->vcn.harvest_config)) in nv_query_video_codecs()
217 switch (adev->ip_versions[UVD_HWIP][0]) { in nv_query_video_codecs()
221 if (amdgpu_sriov_vf(adev)) { in nv_query_video_codecs()
222 if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) { in nv_query_video_codecs()
234 if (adev->vcn.harvest_config & AMDGPU_VCN_HARVEST_VCN0) { in nv_query_video_codecs()
279 static u32 nv_didt_rreg(struct amdgpu_device *adev, u32 reg) in nv_didt_rreg() argument
287 spin_lock_irqsave(&adev->didt_idx_lock, flags); in nv_didt_rreg()
290 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in nv_didt_rreg()
294 static void nv_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in nv_didt_wreg() argument
[all …]
amdgpu_rlc.c
38 void amdgpu_gfx_rlc_enter_safe_mode(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_rlc_enter_safe_mode() argument
40 if (adev->gfx.rlc.in_safe_mode[xcc_id]) in amdgpu_gfx_rlc_enter_safe_mode()
44 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_enter_safe_mode()
47 if (adev->cg_flags & in amdgpu_gfx_rlc_enter_safe_mode()
50 adev->gfx.rlc.funcs->set_safe_mode(adev, xcc_id); in amdgpu_gfx_rlc_enter_safe_mode()
51 adev->gfx.rlc.in_safe_mode[xcc_id] = true; in amdgpu_gfx_rlc_enter_safe_mode()
63 void amdgpu_gfx_rlc_exit_safe_mode(struct amdgpu_device *adev, int xcc_id) in amdgpu_gfx_rlc_exit_safe_mode() argument
65 if (!(adev->gfx.rlc.in_safe_mode[xcc_id])) in amdgpu_gfx_rlc_exit_safe_mode()
69 if (!adev->gfx.rlc.funcs->is_rlc_enabled(adev)) in amdgpu_gfx_rlc_exit_safe_mode()
72 if (adev->cg_flags & in amdgpu_gfx_rlc_exit_safe_mode()
[all …]
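
The RLC safe-mode pair above is idempotent by construction: each call first checks the per-XCC in_safe_mode flag, then whether the RLC is running at all, then whether any relevant clock-gating flags make safe mode necessary, and only then invokes the set/unset hook and flips the flag. A minimal sketch of that guard structure (struct rlc and its callbacks are illustrative stand-ins for adev->gfx.rlc):

#include <stdbool.h>

#define MAX_XCC 8

struct rlc {
	bool in_safe_mode[MAX_XCC];
	bool enabled;                    /* cf. funcs->is_rlc_enabled() */
	unsigned int cg_flags;           /* cf. adev->cg_flags CG mask */
	void (*set_safe_mode)(int xcc_id);
	void (*unset_safe_mode)(int xcc_id);
};

static void rlc_enter_safe_mode(struct rlc *rlc, int xcc_id)
{
	if (rlc->in_safe_mode[xcc_id])  /* already entered: no-op */
		return;
	if (!rlc->enabled)              /* RLC off: nothing to guard */
		return;
	if (!rlc->cg_flags)             /* no CG feature needs it */
		return;
	rlc->set_safe_mode(xcc_id);
	rlc->in_safe_mode[xcc_id] = true;
}

static void rlc_exit_safe_mode(struct rlc *rlc, int xcc_id)
{
	if (!rlc->in_safe_mode[xcc_id] || !rlc->enabled)
		return;
	if (rlc->cg_flags) {
		rlc->unset_safe_mode(xcc_id);
		rlc->in_safe_mode[xcc_id] = false;
	}
}

static void noop(int xcc_id) { (void)xcc_id; }

int main(void)
{
	struct rlc r = { .enabled = true, .cg_flags = 1,
			 .set_safe_mode = noop, .unset_safe_mode = noop };

	rlc_enter_safe_mode(&r, 0);
	rlc_exit_safe_mode(&r, 0);
	return r.in_safe_mode[0];
}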
amdgpu_acp.c
103 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in acp_sw_init() local
105 adev->acp.parent = adev->dev; in acp_sw_init()
107 adev->acp.cgs_device = in acp_sw_init()
108 amdgpu_cgs_create_device(adev); in acp_sw_init()
109 if (!adev->acp.cgs_device) in acp_sw_init()
117 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in acp_sw_fini() local
119 if (adev->acp.cgs_device) in acp_sw_fini()
120 amdgpu_cgs_destroy_device(adev->acp.cgs_device); in acp_sw_fini()
126 void *adev; member
133 struct amdgpu_device *adev; in acp_poweroff() local
[all …]
amdgpu_ras.c
128 static bool amdgpu_ras_check_bad_page(struct amdgpu_device *adev,
131 static void amdgpu_register_bad_pages_mca_notifier(struct amdgpu_device *adev);
139 void amdgpu_ras_set_error_query_ready(struct amdgpu_device *adev, bool ready) in amdgpu_ras_set_error_query_ready() argument
141 if (adev && amdgpu_ras_get_context(adev)) in amdgpu_ras_set_error_query_ready()
142 amdgpu_ras_get_context(adev)->error_query_ready = ready; in amdgpu_ras_set_error_query_ready()
145 static bool amdgpu_ras_get_error_query_ready(struct amdgpu_device *adev) in amdgpu_ras_get_error_query_ready() argument
147 if (adev && amdgpu_ras_get_context(adev)) in amdgpu_ras_get_error_query_ready()
148 return amdgpu_ras_get_context(adev)->error_query_ready; in amdgpu_ras_get_error_query_ready()
153 static int amdgpu_reserve_page_direct(struct amdgpu_device *adev, uint64_t address) in amdgpu_reserve_page_direct() argument
158 if ((address >= adev->gmc.mc_vram_size) || in amdgpu_reserve_page_direct()
[all …]
amdgpu_virt.c
45 bool amdgpu_virt_mmio_blocked(struct amdgpu_device *adev) in amdgpu_virt_mmio_blocked() argument
53 void amdgpu_virt_init_setting(struct amdgpu_device *adev) in amdgpu_virt_init_setting() argument
55 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_virt_init_setting()
58 if (adev->asic_type != CHIP_ALDEBARAN && in amdgpu_virt_init_setting()
59 adev->asic_type != CHIP_ARCTURUS && in amdgpu_virt_init_setting()
60 ((adev->pdev->class >> 8) != PCI_CLASS_ACCELERATOR_PROCESSING)) { in amdgpu_virt_init_setting()
61 if (adev->mode_info.num_crtc == 0) in amdgpu_virt_init_setting()
62 adev->mode_info.num_crtc = 1; in amdgpu_virt_init_setting()
63 adev->enable_virtual_display = true; in amdgpu_virt_init_setting()
66 adev->cg_flags = 0; in amdgpu_virt_init_setting()
[all …]
amdgpu_gfx.c
42 int amdgpu_gfx_mec_queue_to_bit(struct amdgpu_device *adev, int mec, in amdgpu_gfx_mec_queue_to_bit() argument
47 bit += mec * adev->gfx.mec.num_pipe_per_mec in amdgpu_gfx_mec_queue_to_bit()
48 * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
49 bit += pipe * adev->gfx.mec.num_queue_per_pipe; in amdgpu_gfx_mec_queue_to_bit()
55 void amdgpu_queue_mask_bit_to_mec_queue(struct amdgpu_device *adev, int bit, in amdgpu_queue_mask_bit_to_mec_queue() argument
58 *queue = bit % adev->gfx.mec.num_queue_per_pipe; in amdgpu_queue_mask_bit_to_mec_queue()
59 *pipe = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_queue_mask_bit_to_mec_queue()
60 % adev->gfx.mec.num_pipe_per_mec; in amdgpu_queue_mask_bit_to_mec_queue()
61 *mec = (bit / adev->gfx.mec.num_queue_per_pipe) in amdgpu_queue_mask_bit_to_mec_queue()
62 / adev->gfx.mec.num_pipe_per_mec; in amdgpu_queue_mask_bit_to_mec_queue()
[all …]
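
amdgpu_gfx_mec_queue_to_bit and its inverse define a bijection between a (mec, pipe, queue) triple and a flat bit index: bit = (mec * pipes_per_mec + pipe) * queues_per_pipe + queue, recovered with mod/div in the other direction. A runnable round-trip check; the topology constants are example values, the driver reads them from adev->gfx.mec:

#include <assert.h>

#define PIPES_PER_MEC   4
#define QUEUES_PER_PIPE 8

static int mec_queue_to_bit(int mec, int pipe, int queue)
{
	return mec * PIPES_PER_MEC * QUEUES_PER_PIPE
	     + pipe * QUEUES_PER_PIPE
	     + queue;
}

static void bit_to_mec_queue(int bit, int *mec, int *pipe, int *queue)
{
	*queue = bit % QUEUES_PER_PIPE;
	*pipe  = (bit / QUEUES_PER_PIPE) % PIPES_PER_MEC;
	*mec   = (bit / QUEUES_PER_PIPE) / PIPES_PER_MEC;
}

int main(void)
{
	int mec, pipe, queue;

	/* e.g. MEC 1, pipe 2, queue 3 -> bit 1*32 + 2*8 + 3 = 51 */
	bit_to_mec_queue(mec_queue_to_bit(1, 2, 3), &mec, &pipe, &queue);
	assert(mec == 1 && pipe == 2 && queue == 3);
	return 0;
}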
vi.c
257 static int vi_query_video_codecs(struct amdgpu_device *adev, bool encode, in vi_query_video_codecs() argument
260 switch (adev->asic_type) { in vi_query_video_codecs()
298 static u32 vi_pcie_rreg(struct amdgpu_device *adev, u32 reg) in vi_pcie_rreg() argument
303 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
307 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
311 static void vi_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_pcie_wreg() argument
315 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
320 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
323 static u32 vi_smc_rreg(struct amdgpu_device *adev, u32 reg) in vi_smc_rreg() argument
328 spin_lock_irqsave(&adev->smc_idx_lock, flags); in vi_smc_rreg()
[all …]
amdgpu_amdkfd.c
69 void amdgpu_amdkfd_device_probe(struct amdgpu_device *adev) in amdgpu_amdkfd_device_probe() argument
71 bool vf = amdgpu_sriov_vf(adev); in amdgpu_amdkfd_device_probe()
76 adev->kfd.dev = kgd2kfd_probe(adev, vf); in amdgpu_amdkfd_device_probe()
92 static void amdgpu_doorbell_get_kfd_info(struct amdgpu_device *adev, in amdgpu_doorbell_get_kfd_info() argument
101 if (adev->enable_mes) { in amdgpu_doorbell_get_kfd_info()
108 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
111 } else if (adev->doorbell.size > adev->doorbell.num_kernel_doorbells * in amdgpu_doorbell_get_kfd_info()
113 *aperture_base = adev->doorbell.base; in amdgpu_doorbell_get_kfd_info()
114 *aperture_size = adev->doorbell.size; in amdgpu_doorbell_get_kfd_info()
115 *start_offset = adev->doorbell.num_kernel_doorbells * sizeof(u32); in amdgpu_doorbell_get_kfd_info()
[all …]
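
amdgpu_doorbell_get_kfd_info hands KFD the tail of the doorbell BAR: if the BAR is bigger than the kernel's num_kernel_doorbells 32-bit slots, KFD is given the whole aperture with start_offset pointing just past the kernel's share (the MES case visible at line 101 is handled separately). A sketch of that non-MES arithmetic with illustrative names:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct doorbell {
	uint64_t base;                 /* cf. adev->doorbell.base */
	uint64_t size;                 /* BAR size in bytes */
	uint32_t num_kernel_doorbells; /* 32-bit slots used by amdgpu */
};

static void get_kfd_info(const struct doorbell *db, uint64_t *aperture_base,
			 size_t *aperture_size, size_t *start_offset)
{
	if (db->size > db->num_kernel_doorbells * sizeof(uint32_t)) {
		/* KFD sees the whole BAR, minus the kernel's slots. */
		*aperture_base = db->base;
		*aperture_size = db->size;
		*start_offset  = db->num_kernel_doorbells * sizeof(uint32_t);
	} else {
		*aperture_base = 0;
		*aperture_size = 0;
		*start_offset  = 0;
	}
}

int main(void)
{
	struct doorbell db = { .base = 0xd0000000, .size = 8192,
			       .num_kernel_doorbells = 1024 };
	uint64_t base;
	size_t size, off;

	get_kfd_info(&db, &base, &size, &off);
	printf("base=%#llx size=%zu offset=%zu\n",
	       (unsigned long long)base, size, off);
	return 0;
}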
amdgpu_bios.c
91 static bool igp_read_bios_from_vram(struct amdgpu_device *adev) in igp_read_bios_from_vram() argument
97 if (!(adev->flags & AMD_IS_APU)) in igp_read_bios_from_vram()
98 if (amdgpu_device_need_post(adev)) in igp_read_bios_from_vram()
102 if (pci_resource_len(adev->pdev, 0) == 0) in igp_read_bios_from_vram()
105 adev->bios = NULL; in igp_read_bios_from_vram()
106 vram_base = pci_resource_start(adev->pdev, 0); in igp_read_bios_from_vram()
111 adev->bios = kmalloc(size, GFP_KERNEL); in igp_read_bios_from_vram()
112 if (!adev->bios) { in igp_read_bios_from_vram()
116 adev->bios_size = size; in igp_read_bios_from_vram()
117 memcpy_fromio(adev->bios, bios, size); in igp_read_bios_from_vram()
[all …]
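
igp_read_bios_from_vram maps the start of PCI BAR 0, sizes the image, and copies it into a kmalloc'd adev->bios with memcpy_fromio, recording bios_size. A hedged userspace analogue of the shape: the 0x55AA option-ROM signature and 512-byte length unit are standard PCI ROM-header facts, but the driver's actual validation lives elsewhere and is simplified here:

#include <stdlib.h>
#include <string.h>
#include <stdint.h>

static uint8_t *read_bios_image(const uint8_t *vram, size_t bar_len,
				size_t *bios_size)
{
	size_t size;
	uint8_t *bios;

	if (bar_len == 0)
		return NULL;            /* no usable BAR */
	if (vram[0] != 0x55 || vram[1] != 0xAA)
		return NULL;            /* not a PCI option ROM */

	size = vram[2] * 512u;          /* ROM length field, 512B units */
	if (size == 0 || size > bar_len)
		return NULL;

	bios = malloc(size);            /* kmalloc(size, GFP_KERNEL) */
	if (!bios)
		return NULL;
	memcpy(bios, vram, size);       /* memcpy_fromio() in the driver */
	*bios_size = size;
	return bios;
}

int main(void)
{
	static const uint8_t rom[1024] = { 0x55, 0xAA, 0x01 }; /* 512B image */
	size_t size = 0;
	uint8_t *bios = read_bios_image(rom, sizeof(rom), &size);

	free(bios);
	return size == 512 ? 0 : 1;
}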
aqua_vanjaram.c
37 void aqua_vanjaram_doorbell_index_init(struct amdgpu_device *adev) in aqua_vanjaram_doorbell_index_init() argument
41 adev->doorbell_index.kiq = AMDGPU_DOORBELL_LAYOUT1_KIQ_START; in aqua_vanjaram_doorbell_index_init()
43 adev->doorbell_index.mec_ring0 = AMDGPU_DOORBELL_LAYOUT1_MEC_RING_START; in aqua_vanjaram_doorbell_index_init()
45 adev->doorbell_index.userqueue_start = AMDGPU_DOORBELL_LAYOUT1_USERQUEUE_START; in aqua_vanjaram_doorbell_index_init()
46 adev->doorbell_index.userqueue_end = AMDGPU_DOORBELL_LAYOUT1_USERQUEUE_END; in aqua_vanjaram_doorbell_index_init()
47 adev->doorbell_index.xcc_doorbell_range = AMDGPU_DOORBELL_LAYOUT1_XCC_RANGE; in aqua_vanjaram_doorbell_index_init()
49 adev->doorbell_index.sdma_doorbell_range = 20; in aqua_vanjaram_doorbell_index_init()
50 for (i = 0; i < adev->sdma.num_instances; i++) in aqua_vanjaram_doorbell_index_init()
51 adev->doorbell_index.sdma_engine[i] = in aqua_vanjaram_doorbell_index_init()
53 i * (adev->doorbell_index.sdma_doorbell_range >> 1); in aqua_vanjaram_doorbell_index_init()
[all …]
amdgpu_irq.c
124 void amdgpu_irq_disable_all(struct amdgpu_device *adev) in amdgpu_irq_disable_all() argument
130 spin_lock_irqsave(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
132 if (!adev->irq.client[i].sources) in amdgpu_irq_disable_all()
136 struct amdgpu_irq_src *src = adev->irq.client[i].sources[j]; in amdgpu_irq_disable_all()
142 r = src->funcs->set(adev, src, k, in amdgpu_irq_disable_all()
150 spin_unlock_irqrestore(&adev->irq.lock, irqflags); in amdgpu_irq_disable_all()
167 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_irq_handler() local
170 ret = amdgpu_ih_process(adev, &adev->irq.ih); in amdgpu_irq_handler()
174 amdgpu_ras_interrupt_fatal_error_handler(adev); in amdgpu_irq_handler()
188 struct amdgpu_device *adev = container_of(work, struct amdgpu_device, in amdgpu_irq_handle_ih1() local
[all …]
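
amdgpu_irq_disable_all walks every client's source array and every interrupt type under adev->irq.lock, calling each source's ->set hook with the disable state, so teardown paths can stop the hardware raising interrupts before the handler infrastructure goes away. A sketch of the nested walk; counts, the state enum and struct names are illustrative:

#include <stdio.h>

#define NUM_CLIENTS 2
#define NUM_SOURCES 4

enum irq_state { IRQ_STATE_DISABLE, IRQ_STATE_ENABLE };

struct irq_src {
	unsigned int num_types;
	int (*set)(struct irq_src *src, unsigned int type, enum irq_state st);
};

struct irq_client {
	struct irq_src **sources;   /* NULL if nothing registered */
};

static void irq_disable_all(struct irq_client clients[NUM_CLIENTS])
{
	/* The driver holds spin_lock_irqsave(&adev->irq.lock) here. */
	for (int i = 0; i < NUM_CLIENTS; i++) {
		if (!clients[i].sources)
			continue;
		for (int j = 0; j < NUM_SOURCES; j++) {
			struct irq_src *src = clients[i].sources[j];

			if (!src || !src->set)
				continue;
			for (unsigned int k = 0; k < src->num_types; k++)
				if (src->set(src, k, IRQ_STATE_DISABLE))
					fprintf(stderr, "disable failed\n");
		}
	}
}

int main(void)
{
	struct irq_client clients[NUM_CLIENTS] = { { 0 } };

	irq_disable_all(clients);
	return 0;
}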
mxgpu_nv.c
36 static void xgpu_nv_mailbox_send_ack(struct amdgpu_device *adev) in xgpu_nv_mailbox_send_ack() argument
41 static void xgpu_nv_mailbox_set_valid(struct amdgpu_device *adev, bool val) in xgpu_nv_mailbox_set_valid() argument
55 static enum idh_event xgpu_nv_mailbox_peek_msg(struct amdgpu_device *adev) in xgpu_nv_mailbox_peek_msg() argument
61 static int xgpu_nv_mailbox_rcv_msg(struct amdgpu_device *adev, in xgpu_nv_mailbox_rcv_msg() argument
70 xgpu_nv_mailbox_send_ack(adev); in xgpu_nv_mailbox_rcv_msg()
75 static uint8_t xgpu_nv_peek_ack(struct amdgpu_device *adev) in xgpu_nv_peek_ack() argument
80 static int xgpu_nv_poll_ack(struct amdgpu_device *adev) in xgpu_nv_poll_ack() argument
99 static int xgpu_nv_poll_msg(struct amdgpu_device *adev, enum idh_event event) in xgpu_nv_poll_msg() argument
108 r = xgpu_nv_mailbox_rcv_msg(adev, event); in xgpu_nv_poll_msg()
120 static void xgpu_nv_mailbox_trans_msg (struct amdgpu_device *adev, in xgpu_nv_mailbox_trans_msg() argument
[all …]
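
The mxgpu_nv helpers implement the SR-IOV VF-to-host mailbox: peek or receive a message, ack it, and poll for an expected event or for the host's ack with a bounded retry loop (xgpu_nv_poll_msg at line 99 above). A sketch of that polling shape; the interval, timeout and helper behavior are illustrative, not the driver's constants:

#include <errno.h>
#include <unistd.h>

#define POLL_INTERVAL_US 10000              /* illustrative, ~10 ms */
#define POLL_TIMEOUT_US  (6 * 1000 * 1000)  /* illustrative bound */

enum idh_event { IDH_READY_TO_ACCESS_GPU, IDH_SUCCESS, IDH_FAIL };

/* Stand-in for xgpu_nv_mailbox_rcv_msg(): 0 when the mailbox holds
 * the expected event (which it then acks), an error otherwise. */
static int mailbox_rcv_msg(enum idh_event event)
{
	static int calls;

	(void)event;
	return ++calls >= 3 ? 0 : -ENOENT;  /* succeed on the 3rd poll */
}

static int poll_msg(enum idh_event event)
{
	int waited_us = 0;

	do {
		if (mailbox_rcv_msg(event) == 0)
			return 0;
		usleep(POLL_INTERVAL_US);   /* msleep() in the kernel */
		waited_us += POLL_INTERVAL_US;
	} while (waited_us < POLL_TIMEOUT_US);

	return -ETIME;
}

int main(void)
{
	return poll_msg(IDH_READY_TO_ACCESS_GPU);
}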
gmc_v6_0.c
44 static void gmc_v6_0_set_gmc_funcs(struct amdgpu_device *adev);
45 static void gmc_v6_0_set_irq_funcs(struct amdgpu_device *adev);
64 static void gmc_v6_0_mc_stop(struct amdgpu_device *adev) in gmc_v6_0_mc_stop() argument
68 gmc_v6_0_wait_for_idle((void *)adev); in gmc_v6_0_mc_stop()
84 static void gmc_v6_0_mc_resume(struct amdgpu_device *adev) in gmc_v6_0_mc_resume() argument
98 static int gmc_v6_0_init_microcode(struct amdgpu_device *adev) in gmc_v6_0_init_microcode() argument
107 switch (adev->asic_type) { in gmc_v6_0_init_microcode()
135 err = amdgpu_ucode_request(adev, &adev->gmc.fw, fw_name); in gmc_v6_0_init_microcode()
137 dev_err(adev->dev, in gmc_v6_0_init_microcode()
140 amdgpu_ucode_release(&adev->gmc.fw); in gmc_v6_0_init_microcode()
[all …]
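
gmc_v6_0_init_microcode maps asic_type to a firmware file name, fetches it with amdgpu_ucode_request, and on failure logs via dev_err and immediately releases the handle with amdgpu_ucode_release so no dangling reference survives. The shape, sketched with stubbed request/release helpers; the file names and chip list here are illustrative:

#include <stdio.h>
#include <errno.h>

/* Stubs standing in for amdgpu_ucode_request()/amdgpu_ucode_release(). */
static int  ucode_request(const char **fw, const char *name) { *fw = name; return 0; }
static void ucode_release(const char **fw)                   { *fw = NULL; }

enum asic_type { CHIP_TAHITI, CHIP_OLAND, CHIP_UNKNOWN };

static int init_microcode(enum asic_type type, const char **fw)
{
	const char *name;
	int err;

	switch (type) {               /* cf. the switch on adev->asic_type */
	case CHIP_TAHITI: name = "amdgpu/tahiti_mc.bin"; break;
	case CHIP_OLAND:  name = "amdgpu/oland_mc.bin";  break;
	default:          return -EINVAL;
	}

	err = ucode_request(fw, name);
	if (err) {
		fprintf(stderr, "mc: failed to load %s\n", name);
		ucode_release(fw);    /* drop any partially-loaded state */
	}
	return err;
}

int main(void)
{
	const char *fw = NULL;

	return init_microcode(CHIP_TAHITI, &fw);
}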
amdgpu_umc.c
27 static int amdgpu_umc_convert_error_address(struct amdgpu_device *adev, in amdgpu_umc_convert_error_address() argument
31 switch (adev->ip_versions[UMC_HWIP][0]) { in amdgpu_umc_convert_error_address()
33 umc_v6_7_convert_error_address(adev, in amdgpu_umc_convert_error_address()
37 dev_warn(adev->dev, in amdgpu_umc_convert_error_address()
45 int amdgpu_umc_page_retirement_mca(struct amdgpu_device *adev, in amdgpu_umc_page_retirement_mca() argument
52 kcalloc(adev->umc.max_ras_err_cnt_per_query, in amdgpu_umc_page_retirement_mca()
55 dev_warn(adev->dev, in amdgpu_umc_page_retirement_mca()
63 ret = amdgpu_umc_convert_error_address(adev, &err_data, err_addr, in amdgpu_umc_page_retirement_mca()
69 amdgpu_ras_add_bad_pages(adev, err_data.err_addr, in amdgpu_umc_page_retirement_mca()
71 amdgpu_ras_save_bad_pages(adev, NULL); in amdgpu_umc_page_retirement_mca()
[all …]
gfx_v11_0.c
108 static void gfx_v11_0_disable_gpa_mode(struct amdgpu_device *adev);
109 static void gfx_v11_0_set_ring_funcs(struct amdgpu_device *adev);
110 static void gfx_v11_0_set_irq_funcs(struct amdgpu_device *adev);
111 static void gfx_v11_0_set_gds_init(struct amdgpu_device *adev);
112 static void gfx_v11_0_set_rlc_funcs(struct amdgpu_device *adev);
113 static void gfx_v11_0_set_mqd_funcs(struct amdgpu_device *adev);
114 static void gfx_v11_0_set_imu_funcs(struct amdgpu_device *adev);
115 static int gfx_v11_0_get_cu_info(struct amdgpu_device *adev,
117 static uint64_t gfx_v11_0_get_gpu_clock_counter(struct amdgpu_device *adev);
118 static void gfx_v11_0_select_se_sh(struct amdgpu_device *adev, u32 se_num,
[all …]
amdgpu_kms.c
48 void amdgpu_unregister_gpu_instance(struct amdgpu_device *adev) in amdgpu_unregister_gpu_instance() argument
57 if (gpu_instance->adev == adev) { in amdgpu_unregister_gpu_instance()
61 if (adev->flags & AMD_IS_APU) in amdgpu_unregister_gpu_instance()
82 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_driver_unload_kms() local
84 if (adev == NULL) in amdgpu_driver_unload_kms()
87 amdgpu_unregister_gpu_instance(adev); in amdgpu_driver_unload_kms()
89 if (adev->rmmio == NULL) in amdgpu_driver_unload_kms()
95 amdgpu_acpi_fini(adev); in amdgpu_driver_unload_kms()
96 amdgpu_device_fini_hw(adev); in amdgpu_driver_unload_kms()
99 void amdgpu_register_gpu_instance(struct amdgpu_device *adev) in amdgpu_register_gpu_instance() argument
[all …]
/drivers/gpu/drm/amd/pm/
amdgpu_dpm.c
36 #define amdgpu_dpm_enable_bapm(adev, e) \ argument
37 ((adev)->powerplay.pp_funcs->enable_bapm((adev)->powerplay.pp_handle, (e)))
39 #define amdgpu_dpm_is_legacy_dpm(adev) ((adev)->powerplay.pp_handle == (adev)) argument
41 int amdgpu_dpm_get_sclk(struct amdgpu_device *adev, bool low) in amdgpu_dpm_get_sclk() argument
43 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_sclk()
49 mutex_lock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
50 ret = pp_funcs->get_sclk((adev)->powerplay.pp_handle, in amdgpu_dpm_get_sclk()
52 mutex_unlock(&adev->pm.mutex); in amdgpu_dpm_get_sclk()
57 int amdgpu_dpm_get_mclk(struct amdgpu_device *adev, bool low) in amdgpu_dpm_get_mclk() argument
59 const struct amd_pm_funcs *pp_funcs = adev->powerplay.pp_funcs; in amdgpu_dpm_get_mclk()
[all …]
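
amdgpu_dpm_get_sclk/get_mclk show the powerplay call convention: verify the pp_funcs table actually provides the hook, take adev->pm.mutex around the call into pp_handle, and return the result. A sketch of that guard; the early return value for a missing hook is assumed here, since the snippet does not show it:

#include <stdbool.h>
#include <pthread.h>

struct pm_funcs {
	int (*get_sclk)(void *handle, bool low);
};

/* Illustrative mirror of adev->pm.mutex / adev->powerplay. */
struct pm_ctx {
	pthread_mutex_t mutex;
	const struct pm_funcs *funcs;   /* may lack hooks */
	void *handle;
};

static int dpm_get_sclk(struct pm_ctx *pm, bool low)
{
	int ret;

	if (!pm->funcs || !pm->funcs->get_sclk)
		return 0;  /* hook absent: nothing to report */

	pthread_mutex_lock(&pm->mutex);  /* serializes powerplay entry */
	ret = pm->funcs->get_sclk(pm->handle, low);
	pthread_mutex_unlock(&pm->mutex);
	return ret;
}

static int fake_get_sclk(void *handle, bool low)
{
	(void)handle;
	return low ? 300 : 1200;   /* MHz, made-up values */
}

static const struct pm_funcs funcs = { .get_sclk = fake_get_sclk };

int main(void)
{
	static struct pm_ctx pm = { .mutex = PTHREAD_MUTEX_INITIALIZER,
				    .funcs = &funcs };

	return dpm_get_sclk(&pm, true) == 300 ? 0 : 1;
}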
/drivers/gpu/drm/amd/pm/legacy-dpm/
kv_dpm.c
47 static void kv_dpm_set_irq_funcs(struct amdgpu_device *adev);
48 static int kv_enable_nb_dpm(struct amdgpu_device *adev,
50 static void kv_init_graphics_levels(struct amdgpu_device *adev);
51 static int kv_calculate_ds_divider(struct amdgpu_device *adev);
52 static int kv_calculate_nbps_level_settings(struct amdgpu_device *adev);
53 static int kv_calculate_dpm_settings(struct amdgpu_device *adev);
54 static void kv_enable_new_levels(struct amdgpu_device *adev);
55 static void kv_program_nbps_index_settings(struct amdgpu_device *adev,
57 static int kv_set_enabled_level(struct amdgpu_device *adev, u32 level);
58 static int kv_set_enabled_levels(struct amdgpu_device *adev);
[all …]
