Searched refs: num_compute_rings (Results 1 – 8 of 8) sorted by relevance
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c:
  213  adev->gfx.num_compute_rings); in amdgpu_gfx_compute_queue_acquire()
  410  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_mqd_sw_init()
  446  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_mqd_sw_fini()
  471  adev->gfx.num_compute_rings)) in amdgpu_gfx_disable_kcq()
  474  for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_gfx_disable_kcq()
  523  adev->gfx.num_compute_rings + in amdgpu_gfx_enable_kcq()
  531  for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_gfx_enable_kcq()
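Every hit in this file follows the same idiom: the shared helpers size a KIQ submission by the compute-ring count and then touch each compute queue in a `for (i = 0; i < adev->gfx.num_compute_rings; i++)` loop. Below is a minimal, self-contained model of that shape; the struct and function names are hypothetical stand-ins, since the real code operates on struct amdgpu_device and emits KIQ packets per iteration.

#include <stdio.h>

#define AMDGPU_MAX_COMPUTE_RINGS 8  /* assumed upper bound, as in the driver */

/* Hypothetical stand-in for the relevant slice of the gfx state. */
struct gfx_model {
	unsigned num_compute_rings;  /* the member this search is for */
	int sched_ready[AMDGPU_MAX_COMPUTE_RINGS];
};

/* Mirrors the shape of amdgpu_gfx_disable_kcq(): visit every compute
 * queue once; the real helper emits one KIQ unmap packet per ring. */
static void model_disable_kcq(struct gfx_model *gfx)
{
	for (unsigned i = 0; i < gfx->num_compute_rings; i++)
		gfx->sched_ready[i] = 0;
}

int main(void)
{
	struct gfx_model gfx = { .num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS };

	model_disable_kcq(&gfx);
	printf("unmapped %u compute queues\n", gfx.num_compute_rings);
	return 0;
}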
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h:
  293  unsigned num_compute_rings; member
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c:
  1345  mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE; in gfx_v8_0_mec_init()
  2085  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v8_0_sw_fini()
  4371  r = amdgpu_ring_alloc(kiq_ring, (8 * adev->gfx.num_compute_rings) + 8); in gfx_v8_0_kiq_kcq_enable()
  4385  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kiq_kcq_enable()
  4740  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kcq_resume()
  4783  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_cp_test_all_rings()
  4847  r = amdgpu_ring_alloc(kiq_ring, 6 * adev->gfx.num_compute_rings); in gfx_v8_0_kcq_disable()
  4851  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kcq_disable()
  5050  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_pre_soft_reset()
  5145  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_post_soft_reset()
  [all …]
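The two amdgpu_ring_alloc() hits make the KIQ sizing rule visible: enabling the compute queues reserves (8 * num_compute_rings) + 8 dwords, and disabling them reserves 6 dwords per queue. A standalone sketch of that arithmetic follows; the dword constants are copied from the call sites above, while the per-packet breakdown (one map packet per queue plus a fixed preamble) is an assumption for illustration.

#include <stdio.h>

#define KCQ_ENABLE_DW_PER_QUEUE   8  /* from gfx_v8_0_kiq_kcq_enable() */
#define KCQ_ENABLE_DW_FIXED       8
#define KCQ_DISABLE_DW_PER_QUEUE  6  /* from gfx_v8_0_kcq_disable() */

static unsigned kcq_enable_dwords(unsigned num_compute_rings)
{
	return KCQ_ENABLE_DW_PER_QUEUE * num_compute_rings + KCQ_ENABLE_DW_FIXED;
}

static unsigned kcq_disable_dwords(unsigned num_compute_rings)
{
	return KCQ_DISABLE_DW_PER_QUEUE * num_compute_rings;
}

int main(void)
{
	unsigned n = 8;  /* assumed maximum compute-ring count */

	printf("enable: %u dwords, disable: %u dwords\n",
	       kcq_enable_dwords(n), kcq_disable_dwords(n));  /* 72 and 48 */
	return 0;
}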
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:
  2780  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_fini()
  3121  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
  3131  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
  4241  adev->gfx.num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS; in gfx_v7_0_early_init()
  4547  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_sw_fini()
  4898  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_eop_irq()
  4923  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_fault()
  5113  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c:
  1966  mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
  2401  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
  3818  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kcq_resume()
  3882  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
  4682  adev->gfx.num_compute_rings = amdgpu_num_kcq; in gfx_v9_0_early_init()
  5840  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
  5870  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_fault()
  6857  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:
  3067  adev->gfx.num_compute_rings = GFX6_NUM_COMPUTE_RINGS; in gfx_v6_0_early_init()
  3120  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v6_0_sw_init()
  3153  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_sw_fini()
  3556  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_set_ring_funcs()
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:
  4096  mec_hpd_size = adev->gfx.num_compute_rings * GFX10_MEC_HPD_SIZE; in gfx_v10_0_mec_init()
  4526  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_sw_fini()
  6723  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_kcq_resume()
  6789  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_cp_resume()
  7255  adev->gfx.num_compute_rings = amdgpu_num_kcq; in gfx_v10_0_early_init()
  8365  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_eop_irq()
  8437  for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v10_0_handle_priv_fault()
  8701  for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v10_0_set_ring_funcs()
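The early_init hits show two policies for choosing the count: GFX6 and GFX7 hard-code it (GFX6_NUM_COMPUTE_RINGS, AMDGPU_MAX_COMPUTE_RINGS), while GFX9 and GFX10 take it from the amdgpu_num_kcq module parameter. A hedged sketch of how such a parameter could be clamped to a valid ring count; the driver performs its own validation elsewhere before these hooks run, so the bounds and the -1-means-default convention here are assumptions.

#include <stdio.h>

#define AMDGPU_MAX_COMPUTE_RINGS 8  /* assumed driver maximum */

/* Models a num_kcq-style module parameter: -1 selects the default,
 * and out-of-range requests fall back to the maximum. */
static int amdgpu_num_kcq = -1;

static unsigned resolve_num_compute_rings(void)
{
	if (amdgpu_num_kcq < 0 || amdgpu_num_kcq > AMDGPU_MAX_COMPUTE_RINGS)
		return AMDGPU_MAX_COMPUTE_RINGS;
	return (unsigned)amdgpu_num_kcq;
}

int main(void)
{
	printf("num_compute_rings = %u\n", resolve_num_compute_rings());
	return 0;
}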
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c:
  358  for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_hw_ip_info()
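This last hit is where the count reaches userspace: amdgpu_hw_ip_info() answers the AMDGPU_INFO ioctl and reports the usable compute rings as a bitmask. The sketch below queries it through libdrm's amdgpu wrapper; the render-node path and error handling are simplified, and it assumes a build against libdrm (-ldrm_amdgpu, libdrm include path).

#include <stdio.h>
#include <stdint.h>
#include <fcntl.h>
#include <unistd.h>
#include <amdgpu.h>
#include <amdgpu_drm.h>

int main(void)
{
	int fd = open("/dev/dri/renderD128", O_RDWR);  /* first render node; adjust as needed */
	uint32_t major, minor;
	amdgpu_device_handle dev;
	struct drm_amdgpu_info_hw_ip info = {0};

	if (fd < 0 || amdgpu_device_initialize(fd, &major, &minor, &dev))
		return 1;

	if (!amdgpu_query_hw_ip_info(dev, AMDGPU_HW_IP_COMPUTE, 0, &info)) {
		/* available_rings has one bit set per usable compute ring */
		printf("compute rings: %d\n", __builtin_popcount(info.available_rings));
	}

	amdgpu_device_deinitialize(dev);
	close(fd);
	return 0;
}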