Searched refs:num_compute_rings (Results 1 – 8 of 8) sorted by relevance
141 adev->gfx.num_compute_rings = in amdgpu_gfx_compute_queue_acquire()
146 if (WARN_ON(adev->gfx.num_compute_rings > AMDGPU_MAX_COMPUTE_RINGS)) in amdgpu_gfx_compute_queue_acquire()
147 adev->gfx.num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS; in amdgpu_gfx_compute_queue_acquire()
278 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_compute_mqd_sw_init()
304 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in amdgpu_gfx_compute_mqd_sw_fini()
237 ip_num_rings = adev->gfx.num_compute_rings; in amdgpu_queue_mgr_map()
841 mec_hpd_size = adev->gfx.num_compute_rings * GFX9_MEC_HPD_SIZE; in gfx_v9_0_mec_init()
1395 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_sw_fini()
2278 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_cp_compute_enable()
2371 r = amdgpu_ring_alloc(kiq_ring, (7 * adev->gfx.num_compute_rings) + 11); in gfx_v9_0_kiq_kcq_enable()
2388 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kiq_kcq_enable()
2761 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_kiq_resume()
2823 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_cp_resume()
3051 adev->gfx.num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS; in gfx_v9_0_early_init()
4000 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v9_0_eop_irq()
4223 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v9_0_set_ring_funcs()
2700 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_cp_compute_enable()
2779 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_fini()
3120 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
3130 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_cp_compute_resume()
4319 adev->gfx.num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS; in gfx_v7_0_early_init()
4641 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_sw_fini()
4982 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v7_0_eop_irq()
5142 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v7_0_set_ring_funcs()
1316 mec_hpd_size = adev->gfx.num_compute_rings * GFX8_MEC_HPD_SIZE; in gfx_v8_0_mec_init()
2053 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v8_0_sw_fini()
4394 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v8_0_cp_compute_enable()
4491 r = amdgpu_ring_alloc(kiq_ring, (8 * adev->gfx.num_compute_rings) + 11); in gfx_v8_0_kiq_kcq_enable()
4506 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kiq_kcq_enable()
4854 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kiq_resume()
4887 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_kiq_resume()
5129 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_pre_soft_reset()
5229 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v8_0_post_soft_reset()
5384 adev->gfx.num_compute_rings = AMDGPU_MAX_COMPUTE_RINGS; in gfx_v8_0_early_init()
[all …]
1982 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_cp_gfx_enable()
3128 adev->gfx.num_compute_rings = GFX6_NUM_COMPUTE_RINGS; in gfx_v6_0_early_init()
3178 for (i = 0; i < adev->gfx.num_compute_rings; i++) { in gfx_v6_0_sw_init()
3210 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_sw_fini()
3582 for (i = 0; i < adev->gfx.num_compute_rings; i++) in gfx_v6_0_set_ring_funcs()
292 for (i = 0; i < adev->gfx.num_compute_rings; i++) in amdgpu_info_ioctl()
1003 unsigned num_compute_rings; member