
Lines matching refs: a5xx_gpu

Each match below is prefixed with its line number in the source file and suffixed with the function that contains it; declaration sites are additionally marked "local" or "argument".

24 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in update_shadow_rptr() local
26 if (a5xx_gpu->has_whereami) { in update_shadow_rptr()
28 OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring))); in update_shadow_rptr()
29 OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring))); in update_shadow_rptr()
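These four matches are essentially the whole body of update_shadow_rptr(): when the loaded PFP microcode supports the CP_WHERE_AM_I packet (has_whereami, set at line 567), the driver asks the CP to write its current read pointer into a per-ring shadow slot in GPU memory. A sketch reconstructed from the matches, assuming the upstream packet type and the shadowptr() helper that returns the iova of this ring's slot:

    static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
    {
            struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
            struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);

            if (a5xx_gpu->has_whereami) {
                    /* CP_WHERE_AM_I: CP writes its rptr to the given 64-bit iova */
                    OUT_PKT7(ring, CP_WHERE_AM_I, 2);
                    OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring)));
                    OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring)));
            }
    }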
37 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_flush() local
62 if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu)) in a5xx_flush()
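The a5xx_flush() match at line 62 is the guard on the final doorbell write: the new write pointer only reaches the CP_RB_WPTR register when the flushed ring is the one the CP is currently executing and no preemption is in flight; otherwise the preemption code restores wptr when it switches rings. A sketch of that tail, assuming the ring's preempt_lock and the driver's get_wptr() helper:

    spin_lock_irqsave(&ring->preempt_lock, flags);
    wptr = get_wptr(ring);    /* compute the hardware wptr, wrapping as needed */
    spin_unlock_irqrestore(&ring->preempt_lock, flags);

    /* Only poke the hardware if this ring is active and not mid-preempt */
    if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu))
            gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);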
128 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_submit() local
148 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
149 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
512 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_preempt_start() local
524 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
525 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
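The a5xx_submit() matches (lines 148-149) and the a5xx_preempt_start() matches (lines 524-525) program the same thing: the iova of the per-ring preemption save record, written through the CP_CONTEXT_SWITCH_SAVE_ADDR_LO/HI pair so the CP knows where to spill state if this ring is preempted. A sketch of the shared packet sequence; the protected-mode toggle around it follows the upstream driver, since these registers are not otherwise writable from the ring:

    /* Turn off protected mode to write to special registers */
    OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
    OUT_RING(ring, 0);

    /* Point the CP at this ring's preemption save record */
    OUT_PKT4(ring, REG_A5XX_CP_CONTEXT_SWITCH_SAVE_ADDR_LO, 2);
    OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id]));
    OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id]));

    /* Turn protected mode back on */
    OUT_PKT7(ring, CP_SET_PROTECTED_MODE, 1);
    OUT_RING(ring, 1);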
553 static void a5xx_ucode_check_version(struct a5xx_gpu *a5xx_gpu, in a5xx_ucode_check_version() argument
567 a5xx_gpu->has_whereami = true; in a5xx_ucode_check_version()
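a5xx_ucode_check_version() is where has_whereami comes from: it maps the PFP microcode buffer and sets the flag only if the firmware is new enough to implement CP_WHERE_AM_I. The matches do not show the version test itself, so in the sketch below pfp_needs_whereami() is a hypothetical stand-in for the real check on the mapped microcode words:

    static void a5xx_ucode_check_version(struct a5xx_gpu *a5xx_gpu,
                    struct drm_gem_object *obj)
    {
            u32 *buf = msm_gem_get_vaddr(obj);

            if (IS_ERR(buf))
                    return;

            /* pfp_needs_whereami(): hypothetical, stands in for the real
             * firmware-version test on the microcode image */
            if (pfp_needs_whereami(buf))
                    a5xx_gpu->has_whereami = true;

            msm_gem_put_vaddr(obj);
    }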
575 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_ucode_init() local
578 if (!a5xx_gpu->pm4_bo) { in a5xx_ucode_init()
579 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
580 adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova); in a5xx_ucode_init()
583 if (IS_ERR(a5xx_gpu->pm4_bo)) { in a5xx_ucode_init()
584 ret = PTR_ERR(a5xx_gpu->pm4_bo); in a5xx_ucode_init()
585 a5xx_gpu->pm4_bo = NULL; in a5xx_ucode_init()
591 msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw"); in a5xx_ucode_init()
594 if (!a5xx_gpu->pfp_bo) { in a5xx_ucode_init()
595 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
596 adreno_gpu->fw[ADRENO_FW_PFP], &a5xx_gpu->pfp_iova); in a5xx_ucode_init()
598 if (IS_ERR(a5xx_gpu->pfp_bo)) { in a5xx_ucode_init()
599 ret = PTR_ERR(a5xx_gpu->pfp_bo); in a5xx_ucode_init()
600 a5xx_gpu->pfp_bo = NULL; in a5xx_ucode_init()
606 msm_gem_object_set_name(a5xx_gpu->pfp_bo, "pfpfw"); in a5xx_ucode_init()
607 a5xx_ucode_check_version(a5xx_gpu, a5xx_gpu->pfp_bo); in a5xx_ucode_init()
611 REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova); in a5xx_ucode_init()
614 REG_A5XX_CP_PFP_INSTR_BASE_HI, a5xx_gpu->pfp_iova); in a5xx_ucode_init()
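a5xx_ucode_init() creates a GEM buffer for each firmware image exactly once, turns a creation failure into an error return while resetting the pointer so a retry does not trip over a stale ERR_PTR, and finally tells the CP where the images live. A sketch of the PM4 half; the PFP half (lines 594-607) is identical apart from names and the extra version check:

    if (!a5xx_gpu->pm4_bo) {
            a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu,
                    adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova);

            if (IS_ERR(a5xx_gpu->pm4_bo)) {
                    ret = PTR_ERR(a5xx_gpu->pm4_bo);
                    a5xx_gpu->pm4_bo = NULL;  /* don't leave an ERR_PTR behind */
                    return ret;
            }

            msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw");
    }

    /* Program the instruction base so the CP can fetch the microcode */
    gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO,
            REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova);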
667 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_hw_init() local
920 if (a5xx_gpu->has_whereami) { in a5xx_hw_init()
921 if (!a5xx_gpu->shadow_bo) { in a5xx_hw_init()
922 a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in a5xx_hw_init()
925 gpu->aspace, &a5xx_gpu->shadow_bo, in a5xx_hw_init()
926 &a5xx_gpu->shadow_iova); in a5xx_hw_init()
928 if (IS_ERR(a5xx_gpu->shadow)) in a5xx_hw_init()
929 return PTR_ERR(a5xx_gpu->shadow); in a5xx_hw_init()
933 REG_A5XX_CP_RB_RPTR_ADDR_HI, shadowptr(a5xx_gpu, gpu->rb[0])); in a5xx_hw_init()
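In a5xx_hw_init() the shadow buffer is allocated lazily, once, and only when the firmware supports CP_WHERE_AM_I; the CP is then pointed at ring 0's slot so the writes emitted by update_shadow_rptr() have somewhere to land. A sketch, assuming the upstream sizing (one u32 per ring) and buffer flags, which the matches do not show:

    if (a5xx_gpu->has_whereami) {
            if (!a5xx_gpu->shadow_bo) {
                    /* One rptr slot per ring; flags are an assumption */
                    a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev,
                            sizeof(u32) * gpu->nr_rings,
                            MSM_BO_WC | MSM_BO_MAP_PRIV,
                            gpu->aspace, &a5xx_gpu->shadow_bo,
                            &a5xx_gpu->shadow_iova);

                    if (IS_ERR(a5xx_gpu->shadow))
                            return PTR_ERR(a5xx_gpu->shadow);
            }

            gpu_write64(gpu, REG_A5XX_CP_RB_RPTR_ADDR,
                    REG_A5XX_CP_RB_RPTR_ADDR_HI, shadowptr(a5xx_gpu, gpu->rb[0]));
    }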
1027 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_destroy() local
1033 if (a5xx_gpu->pm4_bo) { in a5xx_destroy()
1034 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
1035 drm_gem_object_put(a5xx_gpu->pm4_bo); in a5xx_destroy()
1038 if (a5xx_gpu->pfp_bo) { in a5xx_destroy()
1039 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
1040 drm_gem_object_put(a5xx_gpu->pfp_bo); in a5xx_destroy()
1043 if (a5xx_gpu->gpmu_bo) { in a5xx_destroy()
1044 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
1045 drm_gem_object_put(a5xx_gpu->gpmu_bo); in a5xx_destroy()
1048 if (a5xx_gpu->shadow_bo) { in a5xx_destroy()
1049 msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace); in a5xx_destroy()
1050 drm_gem_object_put(a5xx_gpu->shadow_bo); in a5xx_destroy()
1054 kfree(a5xx_gpu); in a5xx_destroy()
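a5xx_destroy() releases every buffer with the same two-step pattern: unpin the iova from the GPU address space, then drop the GEM reference. The NULL checks make it safe to call on a partially initialized device, which the error path in a5xx_gpu_init() (line 1781) relies on. The per-buffer pattern, shown once:

    if (a5xx_gpu->shadow_bo) {
            msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace);
            drm_gem_object_put(a5xx_gpu->shadow_bo);
    }
    ...
    kfree(a5xx_gpu);  /* frees the embedded adreno_gpu/msm_gpu too */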
1073 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_idle() local
1075 if (ring != a5xx_gpu->cur_ring) { in a5xx_idle()
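The a5xx_idle() match is a sanity guard: with preemption enabled there are several rings, but only the CP's current ring can meaningfully be drained, so idling any other ring is rejected outright. The shape of the check, following the upstream driver:

    if (ring != a5xx_gpu->cur_ring) {
            WARN(1, "Tried to idle a non-current ringbuffer\n");
            return false;
    }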
1380 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_pm_suspend() local
1408 if (a5xx_gpu->has_whereami) in a5xx_pm_suspend()
1410 a5xx_gpu->shadow[i] = 0; in a5xx_pm_suspend()
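Once the GPU has been powered off in a5xx_pm_suspend(), the shadow values are stale, so each ring's slot is cleared; a later a5xx_get_rptr() then cannot report a leftover read pointer from before the power collapse. A sketch of that tail:

    ret = msm_gpu_pm_suspend(gpu);
    if (ret)
            return ret;

    /* The CP is down; invalidate the per-ring rptr shadows */
    if (a5xx_gpu->has_whereami)
            for (i = 0; i < gpu->nr_rings; i++)
                    a5xx_gpu->shadow[i] = 0;

    return 0;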
1647 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_active_ring() local
1649 return a5xx_gpu->cur_ring; in a5xx_active_ring()
1679 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_get_rptr() local
1681 if (a5xx_gpu->has_whereami) in a5xx_get_rptr()
1682 return a5xx_gpu->shadow[ring->id]; in a5xx_get_rptr()
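a5xx_get_rptr() is the consumer of the shadow: if the firmware writes the read pointer to memory, return shadow[ring->id]; otherwise fall back to an MMIO read. A sketch, assuming the upstream fallback that also caches the register value in the ring's memptrs:

    static uint32_t a5xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
    {
            struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
            struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);

            if (a5xx_gpu->has_whereami)
                    return a5xx_gpu->shadow[ring->id];

            return ring->memptrs->rptr = gpu_read(gpu, REG_A5XX_CP_RB_RPTR);
    }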
1750 struct a5xx_gpu *a5xx_gpu = NULL; in a5xx_gpu_init() local
1761 a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL); in a5xx_gpu_init()
1762 if (!a5xx_gpu) in a5xx_gpu_init()
1765 adreno_gpu = &a5xx_gpu->base; in a5xx_gpu_init()
1770 a5xx_gpu->lm_leakage = 0x4E001A; in a5xx_gpu_init()
1781 a5xx_destroy(&(a5xx_gpu->base.base)); in a5xx_gpu_init()
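a5xx_gpu_init() also explains why to_a5xx_gpu() works in every function above: struct a5xx_gpu embeds struct adreno_gpu (which embeds struct msm_gpu) as its base member, so a single kzalloc covers all three and the to_*() helpers are container_of casts back out. A sketch of the allocation and the error path, assuming the upstream adreno_gpu_init() call with 4 rings:

    a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL);
    if (!a5xx_gpu)
            return ERR_PTR(-ENOMEM);

    adreno_gpu = &a5xx_gpu->base;   /* embedded struct adreno_gpu */
    gpu = &adreno_gpu->base;        /* embedded struct msm_gpu    */

    a5xx_gpu->lm_leakage = 0x4E001A;  /* limits-management default */

    ret = adreno_gpu_init(dev, pdev, adreno_gpu, &funcs, 4);
    if (ret) {
            /* safe on partial init: a5xx_destroy() NULL-checks each buffer */
            a5xx_destroy(&(a5xx_gpu->base.base));
            return ERR_PTR(ret);
    }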