Lines matching refs: a6xx_gpu (drivers/gpu/drm/msm/adreno/a6xx_gpu.c)
21 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in _a6xx_check_idle() local
24 if (!a6xx_gmu_isidle(&a6xx_gpu->gmu)) in _a6xx_check_idle()
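These first two matches come from the idle check. A minimal sketch of the surrounding function, reconstructed from the matches and mainline context; the RBBM status read, its mask name, and the omitted final pending-interrupt check are assumptions that may differ between kernel versions:

    static bool _a6xx_check_idle(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        /* The GPU cannot be considered idle while the GMU still has work */
        if (!a6xx_gmu_isidle(&a6xx_gpu->gmu))
            return false;

        /* Then require the GPU-side blocks (except the CX master) to be quiet */
        if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) &
                ~A6XX_RBBM_STATUS_CP_AHB_BUSY_CX_MASTER)
            return false;

        return true;
    }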
58 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in update_shadow_rptr() local
61 if (a6xx_gpu->has_whereami && !adreno_gpu->base.hw_apriv) { in update_shadow_rptr()
63 OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring))); in update_shadow_rptr()
64 OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring))); in update_shadow_rptr()
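update_shadow_rptr() is the producer side of the rptr shadow: when the SQE firmware supports it and expanded APRIV is not in use, it emits a packet asking the CP to write its read pointer into the per-ring slot that shadowptr() computes. A sketch, assuming the mainline CP_WHERE_AM_I packet name:

    static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        /* Expanded APRIV hardware maintains the shadow itself, so skip the packet */
        if (a6xx_gpu->has_whereami && !adreno_gpu->base.hw_apriv) {
            OUT_PKT7(ring, CP_WHERE_AM_I, 2);
            OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring)));
            OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring)));
        }
    }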
102 static void a6xx_set_pagetable(struct a6xx_gpu *a6xx_gpu, in a6xx_set_pagetable() argument
109 if (ctx->seqno == a6xx_gpu->cur_ctx_seqno) in a6xx_set_pagetable()
142 a6xx_gpu->cur_ctx_seqno = ctx->seqno; in a6xx_set_pagetable()
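cur_ctx_seqno is what makes pagetable switches cheap for back-to-back submits from the same context: a6xx_set_pagetable() compares the incoming context's seqno against it and returns early on a match, only updating it after actually emitting the switch. In outline (the CP packet sequence that reprograms TTBR0 is omitted):

    static void a6xx_set_pagetable(struct a6xx_gpu *a6xx_gpu,
            struct msm_ringbuffer *ring, struct msm_file_private *ctx)
    {
        /* Nothing to do if this context's pagetable is already installed */
        if (ctx->seqno == a6xx_gpu->cur_ctx_seqno)
            return;

        /* ... emit the CP packets that switch TTBR0 and the ASID ... */

        /* Remember which context is now resident on the GPU */
        a6xx_gpu->cur_ctx_seqno = ctx->seqno;
    }

a6xx_submit() (lines 150/154 below) calls this at the top of every submission, before the actual command stream is queued.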
150 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_submit() local
154 a6xx_set_pagetable(a6xx_gpu, ring, submit->queue->ctx); in a6xx_submit()
501 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_set_hwcg() local
502 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_set_hwcg()
764 static bool a6xx_ucode_check_version(struct a6xx_gpu *a6xx_gpu, in a6xx_ucode_check_version() argument
767 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_ucode_check_version()
799 a6xx_gpu->has_whereami = true; in a6xx_ucode_check_version()
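has_whereami is decided here: a6xx_ucode_check_version() maps the SQE firmware BO, reads its version dword, and decides both whether the firmware is acceptable and whether the CP_WHERE_AM_I feature can be enabled. A hedged sketch; the version field offset and the MIN_WHEREAMI_VERSION threshold are placeholders, since the real minimums are per-chip:

    static bool a6xx_ucode_check_version(struct a6xx_gpu *a6xx_gpu,
            struct drm_gem_object *obj)
    {
        u32 *buf = msm_gem_get_vaddr(obj);
        bool ret = false;

        if (IS_ERR(buf))
            return false;

        /*
         * Hypothetical threshold: the real code picks a per-chip minimum
         * (using the adreno_is_*() helpers on &a6xx_gpu->base).
         */
        if (buf[0] >= MIN_WHEREAMI_VERSION) {
            a6xx_gpu->has_whereami = true;
            ret = true;
        }

        msm_gem_put_vaddr(obj);
        return ret;
    }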
830 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_ucode_init() local
832 if (!a6xx_gpu->sqe_bo) { in a6xx_ucode_init()
833 a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu, in a6xx_ucode_init()
834 adreno_gpu->fw[ADRENO_FW_SQE], &a6xx_gpu->sqe_iova); in a6xx_ucode_init()
836 if (IS_ERR(a6xx_gpu->sqe_bo)) { in a6xx_ucode_init()
837 int ret = PTR_ERR(a6xx_gpu->sqe_bo); in a6xx_ucode_init()
839 a6xx_gpu->sqe_bo = NULL; in a6xx_ucode_init()
846 msm_gem_object_set_name(a6xx_gpu->sqe_bo, "sqefw"); in a6xx_ucode_init()
847 if (!a6xx_ucode_check_version(a6xx_gpu, a6xx_gpu->sqe_bo)) { in a6xx_ucode_init()
848 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_ucode_init()
849 drm_gem_object_put(a6xx_gpu->sqe_bo); in a6xx_ucode_init()
851 a6xx_gpu->sqe_bo = NULL; in a6xx_ucode_init()
857 REG_A6XX_CP_SQE_INSTR_BASE+1, a6xx_gpu->sqe_iova); in a6xx_ucode_init()
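The a6xx_ucode_init() matches show the standard lazy firmware-BO pattern: create and pin the SQE microcode object once, name it for debugfs, validate it, and then point the CP's instruction base at its iova on every (re)init. A sketch close to the listing (the gpu_write64() call takes a LO/HI register pair in this kernel era; newer kernels differ):

    static int a6xx_ucode_init(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        if (!a6xx_gpu->sqe_bo) {
            a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu,
                adreno_gpu->fw[ADRENO_FW_SQE], &a6xx_gpu->sqe_iova);

            if (IS_ERR(a6xx_gpu->sqe_bo)) {
                int ret = PTR_ERR(a6xx_gpu->sqe_bo);

                a6xx_gpu->sqe_bo = NULL;
                DRM_DEV_ERROR(&gpu->pdev->dev,
                    "Could not allocate SQE ucode: %d\n", ret);
                return ret;
            }

            msm_gem_object_set_name(a6xx_gpu->sqe_bo, "sqefw");

            /* Drop the firmware again if the version check rejects it */
            if (!a6xx_ucode_check_version(a6xx_gpu, a6xx_gpu->sqe_bo)) {
                msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace);
                drm_gem_object_put(a6xx_gpu->sqe_bo);
                a6xx_gpu->sqe_bo = NULL;
                return -EPERM;
            }
        }

        gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE,
            REG_A6XX_CP_SQE_INSTR_BASE+1, a6xx_gpu->sqe_iova);

        return 0;
    }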
891 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in hw_init() local
895 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in hw_init()
1068 if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) { in hw_init()
1069 if (!a6xx_gpu->shadow_bo) { in hw_init()
1070 a6xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in hw_init()
1073 gpu->aspace, &a6xx_gpu->shadow_bo, in hw_init()
1074 &a6xx_gpu->shadow_iova); in hw_init()
1076 if (IS_ERR(a6xx_gpu->shadow)) in hw_init()
1077 return PTR_ERR(a6xx_gpu->shadow); in hw_init()
1082 shadowptr(a6xx_gpu, gpu->rb[0])); in hw_init()
1086 a6xx_gpu->cur_ring = gpu->rb[0]; in hw_init()
1088 a6xx_gpu->cur_ctx_seqno = 0; in hw_init()
1132 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in hw_init()
1134 if (a6xx_gpu->gmu.legacy) { in hw_init()
1136 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER); in hw_init()
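The hw_init() matches fall into three groups: the GMU_OOB_GPU_SET vote that brackets the whole sequence, the lazy allocation of the rptr shadow buffer, and the reset of the scheduling state (cur_ring, cur_ctx_seqno). An abridged excerpt, assuming the allocation flags are MSM_BO_WC | MSM_BO_MAP_PRIV (they vary by kernel version):

    /* Take the GPU out of the GMU's hands for the duration of init */
    ret = a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);
    if (ret)
        return ret;

    /* ... clock gating, protection and performance register setup ... */

    if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) {
        if (!a6xx_gpu->shadow_bo) {
            /* One u32 rptr slot per ringbuffer, CPU-visible and GPU-writable */
            a6xx_gpu->shadow = msm_gem_kernel_new(gpu->dev,
                sizeof(u32) * gpu->nr_rings,
                MSM_BO_WC | MSM_BO_MAP_PRIV,
                gpu->aspace, &a6xx_gpu->shadow_bo,
                &a6xx_gpu->shadow_iova);

            if (IS_ERR(a6xx_gpu->shadow))
                return PTR_ERR(a6xx_gpu->shadow);
        }
    }

    /* Fresh start: ring 0 is current and no context pagetable is resident */
    a6xx_gpu->cur_ring = gpu->rb[0];
    a6xx_gpu->cur_ctx_seqno = 0;

    /* ... CP bootstrap ... */

    /* Let the GMU manage GPU power again now that init is done */
    a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);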
1145 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_hw_init() local
1148 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_hw_init()
1150 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_hw_init()
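a6xx_hw_init() itself is just a locking wrapper, and it illustrates the convention used throughout this file: any path that ends up talking to the GMU holds a6xx_gpu->gmu.lock around the call. Essentially:

    static int a6xx_hw_init(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
        int ret;

        mutex_lock(&a6xx_gpu->gmu.lock);
        ret = hw_init(gpu);
        mutex_unlock(&a6xx_gpu->gmu.lock);

        return ret;
    }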
1168 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_recover() local
1184 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 0); in a6xx_recover()
1346 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_fault_detect_irq() local
1362 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1); in a6xx_fault_detect_irq()
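The two GMU_PWR_COL_KEEPALIVE writes are a matched pair: the fault-detect IRQ asserts the keepalive so the GX rail cannot power-collapse while hang state is captured, and a6xx_recover() drops it again before the GPU is power-cycled. Schematically:

    /* a6xx_fault_detect_irq(): keep GX alive while we capture crash state */
    gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1);
    /* ... dump registers, log the hang, schedule the recovery worker ... */

    /* a6xx_recover(): release the vote before power-cycling the GPU */
    gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 0);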
1411 static void a6xx_llc_rmw(struct a6xx_gpu *a6xx_gpu, u32 reg, u32 mask, u32 or) in a6xx_llc_rmw() argument
1413 return msm_rmw(a6xx_gpu->llc_mmio + (reg << 2), mask, or); in a6xx_llc_rmw()
1416 static void a6xx_llc_write(struct a6xx_gpu *a6xx_gpu, u32 reg, u32 value) in a6xx_llc_write() argument
1418 return msm_writel(value, a6xx_gpu->llc_mmio + (reg << 2)); in a6xx_llc_write()
1421 static void a6xx_llc_deactivate(struct a6xx_gpu *a6xx_gpu) in a6xx_llc_deactivate() argument
1423 llcc_slice_deactivate(a6xx_gpu->llc_slice); in a6xx_llc_deactivate()
1424 llcc_slice_deactivate(a6xx_gpu->htw_llc_slice); in a6xx_llc_deactivate()
1427 static void a6xx_llc_activate(struct a6xx_gpu *a6xx_gpu) in a6xx_llc_activate() argument
1429 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_llc_activate()
1433 if (IS_ERR(a6xx_gpu->llc_mmio)) in a6xx_llc_activate()
1436 if (!llcc_slice_activate(a6xx_gpu->llc_slice)) { in a6xx_llc_activate()
1437 u32 gpu_scid = llcc_get_slice_id(a6xx_gpu->llc_slice); in a6xx_llc_activate()
1455 if (!llcc_slice_activate(a6xx_gpu->htw_llc_slice)) { in a6xx_llc_activate()
1456 if (!a6xx_gpu->have_mmu500) { in a6xx_llc_activate()
1457 u32 gpuhtw_scid = llcc_get_slice_id(a6xx_gpu->htw_llc_slice); in a6xx_llc_activate()
1471 if (!a6xx_gpu->have_mmu500) { in a6xx_llc_activate()
1472 a6xx_llc_write(a6xx_gpu, in a6xx_llc_activate()
1479 a6xx_llc_rmw(a6xx_gpu, in a6xx_llc_activate()
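a6xx_llc_activate() only runs when the llc_mmio handle is valid (see a6xx_llc_slices_init() below); it activates each LLCC slice and, on parts without an MMU-500, pushes the returned slice IDs (SCIDs) into the GPU cacheability registers through the a6xx_llc_write()/a6xx_llc_rmw() helpers above. A skeleton with the chip-specific register programming left out:

    static void a6xx_llc_activate(struct a6xx_gpu *a6xx_gpu)
    {
        if (IS_ERR(a6xx_gpu->llc_mmio))
            return;

        if (!llcc_slice_activate(a6xx_gpu->llc_slice)) {
            u32 gpu_scid = llcc_get_slice_id(a6xx_gpu->llc_slice);

            /* gpu_scid is replicated into the GPU's cacheability fields here */
        }

        if (!llcc_slice_activate(a6xx_gpu->htw_llc_slice)) {
            if (!a6xx_gpu->have_mmu500) {
                u32 gpuhtw_scid = llcc_get_slice_id(a6xx_gpu->htw_llc_slice);

                /* The pagetable-walker SCID goes into the CX block via a6xx_llc_rmw() */
            }
        }
    }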
1487 static void a6xx_llc_slices_destroy(struct a6xx_gpu *a6xx_gpu) in a6xx_llc_slices_destroy() argument
1489 llcc_slice_putd(a6xx_gpu->llc_slice); in a6xx_llc_slices_destroy()
1490 llcc_slice_putd(a6xx_gpu->htw_llc_slice); in a6xx_llc_slices_destroy()
1494 struct a6xx_gpu *a6xx_gpu) in a6xx_llc_slices_init() argument
1503 a6xx_gpu->have_mmu500 = (phandle && in a6xx_llc_slices_init()
1507 if (a6xx_gpu->have_mmu500) in a6xx_llc_slices_init()
1508 a6xx_gpu->llc_mmio = NULL; in a6xx_llc_slices_init()
1510 a6xx_gpu->llc_mmio = msm_ioremap(pdev, "cx_mem", "gpu_cx"); in a6xx_llc_slices_init()
1512 a6xx_gpu->llc_slice = llcc_slice_getd(LLCC_GPU); in a6xx_llc_slices_init()
1513 a6xx_gpu->htw_llc_slice = llcc_slice_getd(LLCC_GPUHTW); in a6xx_llc_slices_init()
1515 if (IS_ERR_OR_NULL(a6xx_gpu->llc_slice) && IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice)) in a6xx_llc_slices_init()
1516 a6xx_gpu->llc_mmio = ERR_PTR(-EINVAL); in a6xx_llc_slices_init()
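a6xx_llc_slices_init() is the setup side: it detects whether the GPU sits behind an ARM MMU-500 (in which case the SMMU handles the cache attributes and no extra MMIO window is needed), maps the CX block otherwise, and requests the GPU and GPU-HTW system-cache slices. If neither slice is available, llc_mmio is poisoned so a6xx_llc_activate() bails out early. A sketch close to the listing; the "iommus" phandle lookup is assumed from mainline:

    static void a6xx_llc_slices_init(struct platform_device *pdev,
            struct a6xx_gpu *a6xx_gpu)
    {
        struct device_node *phandle;

        /* MMU-500 based targets use a different (SMMU-side) programming path */
        phandle = of_parse_phandle(pdev->dev.of_node, "iommus", 0);
        a6xx_gpu->have_mmu500 = (phandle &&
            of_device_is_compatible(phandle, "arm,mmu-500"));
        of_node_put(phandle);

        if (a6xx_gpu->have_mmu500)
            a6xx_gpu->llc_mmio = NULL;
        else
            a6xx_gpu->llc_mmio = msm_ioremap(pdev, "cx_mem", "gpu_cx");

        a6xx_gpu->llc_slice = llcc_slice_getd(LLCC_GPU);
        a6xx_gpu->htw_llc_slice = llcc_slice_getd(LLCC_GPUHTW);

        /* Without any usable slice, mark the whole LLC path as unavailable */
        if (IS_ERR_OR_NULL(a6xx_gpu->llc_slice) &&
            IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice))
            a6xx_gpu->llc_mmio = ERR_PTR(-EINVAL);
    }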
1522 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_pm_resume() local
1529 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_pm_resume()
1530 ret = a6xx_gmu_resume(a6xx_gpu); in a6xx_pm_resume()
1531 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_pm_resume()
1537 a6xx_llc_activate(a6xx_gpu); in a6xx_pm_resume()
1545 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_pm_suspend() local
1550 a6xx_llc_deactivate(a6xx_gpu); in a6xx_pm_suspend()
1554 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_pm_suspend()
1555 ret = a6xx_gmu_stop(a6xx_gpu); in a6xx_pm_suspend()
1556 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_pm_suspend()
1560 if (a6xx_gpu->shadow_bo) in a6xx_pm_suspend()
1562 a6xx_gpu->shadow[i] = 0; in a6xx_pm_suspend()
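a6xx_pm_resume() and a6xx_pm_suspend() mirror each other: resume starts the GMU under gmu.lock and then activates the LLC slices, while suspend deactivates the slices, stops the GMU under the same lock, and finally zeroes the shadow rptr copies so stale values are not read back after the power cycle. The suspend side, reduced to the lines matched above:

    static int a6xx_pm_suspend(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
        int i, ret;

        a6xx_llc_deactivate(a6xx_gpu);

        mutex_lock(&a6xx_gpu->gmu.lock);
        ret = a6xx_gmu_stop(a6xx_gpu);
        mutex_unlock(&a6xx_gpu->gmu.lock);
        if (ret)
            return ret;

        /* Forget the CP-written read pointers; they are stale after power-down */
        if (a6xx_gpu->shadow_bo)
            for (i = 0; i < gpu->nr_rings; i++)
                a6xx_gpu->shadow[i] = 0;

        return 0;
    }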
1572 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_get_timestamp() local
1574 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_get_timestamp()
1577 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET); in a6xx_get_timestamp()
1582 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET); in a6xx_get_timestamp()
1584 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_get_timestamp()
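The OOB set/clear pair in a6xx_get_timestamp() keeps the GMU from power-collapsing the GX domain in the middle of the read; the always-on counter register names are taken from mainline and may differ by kernel version:

    static int a6xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        mutex_lock(&a6xx_gpu->gmu.lock);

        /* Force the GPU power domain on so the counter can be read */
        a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET);

        *value = gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER_LO,
            REG_A6XX_CP_ALWAYS_ON_COUNTER_HI);

        a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET);

        mutex_unlock(&a6xx_gpu->gmu.lock);

        return 0;
    }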
1592 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_active_ring() local
1594 return a6xx_gpu->cur_ring; in a6xx_active_ring()
1600 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_destroy() local
1602 if (a6xx_gpu->sqe_bo) { in a6xx_destroy()
1603 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_destroy()
1604 drm_gem_object_put(a6xx_gpu->sqe_bo); in a6xx_destroy()
1607 if (a6xx_gpu->shadow_bo) { in a6xx_destroy()
1608 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->aspace); in a6xx_destroy()
1609 drm_gem_object_put(a6xx_gpu->shadow_bo); in a6xx_destroy()
1612 a6xx_llc_slices_destroy(a6xx_gpu); in a6xx_destroy()
1614 a6xx_gmu_remove(a6xx_gpu); in a6xx_destroy()
1618 kfree(a6xx_gpu); in a6xx_destroy()
1624 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gpu_busy() local
1629 if (pm_runtime_get_if_in_use(a6xx_gpu->gmu.dev) == 0) in a6xx_gpu_busy()
1632 busy_cycles = gmu_read64(&a6xx_gpu->gmu, in a6xx_gpu_busy()
1641 pm_runtime_put(a6xx_gpu->gmu.dev); in a6xx_gpu_busy()
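a6xx_gpu_busy() samples a GMU cycle counter for devfreq, but only when the GMU is already powered: pm_runtime_get_if_in_use() returning 0 means the device is not in use, and the sample is skipped rather than waking the hardware. A sketch; the counter register and the 19.2 MHz cycles-to-microseconds conversion follow mainline of this era:

    static unsigned long a6xx_gpu_busy(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
        u64 busy_cycles, busy_time;

        /* Only sample if the GMU is already up; never wake it just for stats */
        if (pm_runtime_get_if_in_use(a6xx_gpu->gmu.dev) == 0)
            return 0;

        busy_cycles = gmu_read64(&a6xx_gpu->gmu,
            REG_A6XX_GMU_CX_GMU_POWER_COUNTER_XOCLK_0_L,
            REG_A6XX_GMU_CX_GMU_POWER_COUNTER_XOCLK_0_H);

        /* Counter ticks at 19.2 MHz: delta * 10 / 192 gives microseconds */
        busy_time = (busy_cycles - gpu->devfreq.busy_cycles) * 10;
        do_div(busy_time, 192);

        gpu->devfreq.busy_cycles = busy_cycles;

        pm_runtime_put(a6xx_gpu->gmu.dev);

        return busy_time;
    }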
1652 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gpu_set_freq() local
1654 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gpu_set_freq()
1656 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gpu_set_freq()
1663 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_create_address_space() local
1677 if (!IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice)) in a6xx_create_address_space()
1720 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_get_rptr() local
1722 if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) in a6xx_get_rptr()
1723 return a6xx_gpu->shadow[ring->id]; in a6xx_get_rptr()
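a6xx_get_rptr() is the consumer of the shadow buffer set up in hw_init(): with expanded APRIV or the WHERE_AM_I firmware feature, the CPU reads the ring's read pointer from the shadow slot that the CP keeps up to date, instead of issuing an MMIO register read. Roughly:

    static uint32_t a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        /* Prefer the memory shadow that the CP itself maintains */
        if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami)
            return a6xx_gpu->shadow[ring->id];

        /* Fall back to reading the ring read pointer register directly */
        return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR);
    }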
1820 struct a6xx_gpu *a6xx_gpu; in a6xx_gpu_init() local
1825 a6xx_gpu = kzalloc(sizeof(*a6xx_gpu), GFP_KERNEL); in a6xx_gpu_init()
1826 if (!a6xx_gpu) in a6xx_gpu_init()
1829 adreno_gpu = &a6xx_gpu->base; in a6xx_gpu_init()
1852 a6xx_llc_slices_init(pdev, a6xx_gpu); in a6xx_gpu_init()
1856 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
1862 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
1872 ret = a6xx_gmu_init(a6xx_gpu, node); in a6xx_gpu_init()
1875 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
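Finally, the a6xx_gpu_init() matches show the allocation and error-unwind convention: the a6xx_gpu wrapper is kzalloc'd, the LLC slices are set up early, and every later failure funnels through a6xx_destroy() on the embedded base GPU object, which is also where the GMU, the LLC slices and the firmware/shadow BOs from the earlier matches are released. In outline (the adreno_gpu_init() call and the device-tree lookup of the GMU node are elided):

    struct msm_gpu *a6xx_gpu_init(struct drm_device *dev)
    {
        struct msm_drm_private *priv = dev->dev_private;
        struct platform_device *pdev = priv->gpu_pdev;
        struct device_node *node;    /* GMU phandle, looked up from DT (elided) */
        struct a6xx_gpu *a6xx_gpu;
        struct adreno_gpu *adreno_gpu;
        int ret;

        a6xx_gpu = kzalloc(sizeof(*a6xx_gpu), GFP_KERNEL);
        if (!a6xx_gpu)
            return ERR_PTR(-ENOMEM);

        adreno_gpu = &a6xx_gpu->base;

        a6xx_llc_slices_init(pdev, a6xx_gpu);

        /* ... adreno_gpu_init(), speedbin setup, GMU node lookup into 'node' ... */

        ret = a6xx_gmu_init(a6xx_gpu, node);
        if (ret) {
            /* All failure paths unwind through a6xx_destroy() */
            a6xx_destroy(&(a6xx_gpu->base.base));
            return ERR_PTR(ret);
        }

        return &adreno_gpu->base;
    }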