Lines matching refs: gpu (drivers/gpu/drm/msm/adreno/a6xx_gpu.c)
18 static inline bool _a6xx_check_idle(struct msm_gpu *gpu) in _a6xx_check_idle() argument
20 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in _a6xx_check_idle()
28 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) & in _a6xx_check_idle()
32 return !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) & in _a6xx_check_idle()
36 static bool a6xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_idle() argument
39 if (!adreno_idle(gpu, ring)) in a6xx_idle()
42 if (spin_until(_a6xx_check_idle(gpu))) { in a6xx_idle()
44 gpu->name, __builtin_return_address(0), in a6xx_idle()
45 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_idle()
46 gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS), in a6xx_idle()
47 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_idle()
48 gpu_read(gpu, REG_A6XX_CP_RB_WPTR)); in a6xx_idle()
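
The idle path above polls _a6xx_check_idle() under a timeout via spin_until(), and dumps RBBM/CP state if the GPU never drains. A minimal user-space sketch of that poll-with-timeout pattern, with read_status() standing in for gpu_read() and the 100 ms budget an assumption:

    #include <stdbool.h>
    #include <stdint.h>
    #include <time.h>

    static uint32_t read_status(void) { return 0; } /* mock gpu_read(): 0 == no busy bits */

    static uint64_t now_ms(void)
    {
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000 + ts.tv_nsec / 1000000;
    }

    static bool poll_idle(unsigned int timeout_ms)
    {
        uint64_t deadline = now_ms() + timeout_ms;

        while (read_status() != 0) {    /* busy bits still set */
            if (now_ms() > deadline)
                return false;           /* timed out; caller logs RBBM/CP state */
        }
        return true;
    }

poll_idle(100) mirrors the "wait, then complain loudly with register contents" shape of a6xx_idle().
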
55 static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in update_shadow_rptr() argument
57 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in update_shadow_rptr()
68 static void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_flush() argument
73 update_shadow_rptr(gpu, ring); in a6xx_flush()
88 gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr); in a6xx_flush()
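
a6xx_flush() publishes new ring contents by writing the updated write pointer to CP_RB_WPTR, after update_shadow_rptr() has periodically asked the CP to mirror its read pointer to memory. A sketch of the publish step, assuming a power-of-two ring and a mocked doorbell register:

    #include <stdint.h>

    #define RING_DWORDS 1024u               /* assumed power-of-two ring size */

    struct ring {
        uint32_t buf[RING_DWORDS];
        uint32_t wptr;                      /* producer position, in dwords */
    };

    static volatile uint32_t mmio_rb_wptr;  /* stands in for REG_A6XX_CP_RB_WPTR */

    static void ring_flush(struct ring *r, const uint32_t *cmds, uint32_t n)
    {
        for (uint32_t i = 0; i < n; i++)
            r->buf[(r->wptr + i) & (RING_DWORDS - 1)] = cmds[i];
        r->wptr = (r->wptr + n) & (RING_DWORDS - 1);

        __sync_synchronize();               /* commands visible before the doorbell */
        mmio_rb_wptr = r->wptr;             /* CP starts fetching up to here */
    }

The barrier before the doorbell is the essential part: the hardware must never observe the new wptr before the command payload is in memory.
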
145 static void a6xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a6xx_submit() argument
148 struct msm_drm_private *priv = gpu->dev->dev_private; in a6xx_submit()
149 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_submit()
200 update_shadow_rptr(gpu, ring); in a6xx_submit()
224 gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER_LO, in a6xx_submit()
227 a6xx_flush(gpu, ring); in a6xx_submit()
498 static void a6xx_set_hwcg(struct msm_gpu *gpu, bool state) in a6xx_set_hwcg() argument
500 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_hwcg()
515 val = gpu_read(gpu, REG_A6XX_RBBM_CLOCK_CNTL); in a6xx_set_hwcg()
525 gpu_write(gpu, reg->offset, state ? reg->value : 0); in a6xx_set_hwcg()
530 gpu_write(gpu, REG_A6XX_RBBM_CLOCK_CNTL, state ? clock_cntl_on : 0); in a6xx_set_hwcg()
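
a6xx_set_hwcg() walks a per-chip table of {register, value} pairs, writing each value to enable hardware clock gating or 0 to disable it, and flips RBBM_CLOCK_CNTL last. A sketch of the table-driven toggle; the offsets and values below are placeholders, not real A6xx data:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct regval { uint32_t offset, value; };

    static const struct regval hwcg_table[] = {
        { 0x0b00, 0x02222222 },             /* hypothetical CLOCK_CNTL_SP0 */
        { 0x0b01, 0x02022220 },             /* hypothetical CLOCK_CNTL2_SP0 */
    };

    static void reg_write(uint32_t off, uint32_t val) { (void)off; (void)val; }

    static void set_hwcg(bool state)
    {
        for (size_t i = 0; i < sizeof(hwcg_table) / sizeof(hwcg_table[0]); i++)
            reg_write(hwcg_table[i].offset, state ? hwcg_table[i].value : 0);
        reg_write(0x0ae0, state ? 0x8aa8aa02 : 0);  /* hypothetical RBBM_CLOCK_CNTL and 'on' value */
    }
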
657 static void a6xx_set_cp_protect(struct msm_gpu *gpu) in a6xx_set_cp_protect() argument
659 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_cp_protect()
681 gpu_write(gpu, REG_A6XX_CP_PROTECT_CNTL, BIT(0) | BIT(1) | BIT(3)); in a6xx_set_cp_protect()
684 gpu_write(gpu, REG_A6XX_CP_PROTECT(i), regs[i]); in a6xx_set_cp_protect()
686 gpu_write(gpu, REG_A6XX_CP_PROTECT(count_max - 1), regs[i]); in a6xx_set_cp_protect()
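
Each CP_PROTECT slot describes one protected register range, and the listing shows the last slot (count_max - 1) being reused so the final entry covers everything above it. The usual Adreno packing puts a deny flag in bit 31, a 14-bit length at bit 18, and an 18-bit base in the low bits; treat this layout as an assumption:

    #include <stdint.h>

    /* Pack one CP_PROTECT entry: deny read/write, range length, base offset. */
    static inline uint32_t protect_nordwr(uint32_t base, uint32_t len)
    {
        return (1u << 31) | ((len & 0x3fffu) << 18) | (base & 0x3ffffu);
    }
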
689 static void a6xx_set_ubwc_config(struct msm_gpu *gpu) in a6xx_set_ubwc_config() argument
691 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_ubwc_config()
719 gpu_write(gpu, REG_A6XX_RB_NC_MODE_CNTL, in a6xx_set_ubwc_config()
721 gpu_write(gpu, REG_A6XX_TPL1_NC_MODE_CNTL, lower_bit << 1); in a6xx_set_ubwc_config()
722 gpu_write(gpu, REG_A6XX_SP_NC_MODE_CNTL, in a6xx_set_ubwc_config()
724 gpu_write(gpu, REG_A6XX_UCHE_MODE_CNTL, lower_bit << 21); in a6xx_set_ubwc_config()
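
The UBWC writes place the same chip-dependent field at different bit positions per block: bit 1 for RB, TPL1 and SP, bit 21 for UCHE, exactly as the shifts in the listing show. A simplified sketch (the real RB/SP values fold in additional fields):

    #include <stdint.h>

    enum { RB_NC, TPL1_NC, SP_NC, UCHE_MODE };      /* mock register IDs */
    static void reg_write(int reg, uint32_t val) { (void)reg; (void)val; }

    static void set_ubwc(uint32_t lower_bit)        /* chip-dependent field */
    {
        reg_write(RB_NC,     lower_bit << 1);
        reg_write(TPL1_NC,   lower_bit << 1);
        reg_write(SP_NC,     lower_bit << 1);
        reg_write(UCHE_MODE, lower_bit << 21);
    }
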
727 static int a6xx_cp_init(struct msm_gpu *gpu) in a6xx_cp_init() argument
729 struct msm_ringbuffer *ring = gpu->rb[0]; in a6xx_cp_init()
752 a6xx_flush(gpu, ring); in a6xx_cp_init()
753 return a6xx_idle(gpu, ring) ? 0 : -EINVAL; in a6xx_cp_init()
764 struct msm_gpu *gpu = &adreno_gpu->base; in a6xx_ucode_check_version() local
800 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_check_version()
809 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_check_version()
815 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_check_version()
823 static int a6xx_ucode_init(struct msm_gpu *gpu) in a6xx_ucode_init() argument
825 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_ucode_init()
829 a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu, in a6xx_ucode_init()
836 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_init()
844 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_ucode_init()
852 gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE, in a6xx_ucode_init()
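
a6xx_ucode_init() pins the SQE firmware into a GPU buffer object and programs its I/O virtual address into CP_SQE_INSTR_BASE; on the register map used here that is a 64-bit value split across a LO/HI pair. A sketch of the split write, helpers mocked:

    #include <stdint.h>

    static void reg_write(int reg, uint32_t val) { (void)reg; (void)val; }
    enum { SQE_BASE_LO, SQE_BASE_HI };              /* mock register pair */

    static void program_sqe_base(uint64_t iova)
    {
        reg_write(SQE_BASE_LO, (uint32_t)iova);         /* low 32 bits */
        reg_write(SQE_BASE_HI, (uint32_t)(iova >> 32)); /* high 32 bits */
    }
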
858 static int a6xx_zap_shader_init(struct msm_gpu *gpu) in a6xx_zap_shader_init() argument
866 ret = adreno_zap_shader_load(gpu, GPU_PAS_ID); in a6xx_zap_shader_init()
884 static int hw_init(struct msm_gpu *gpu) in hw_init() argument
886 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in hw_init()
893 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_CNTL, 0); in hw_init()
900 gpu_write64(gpu, REG_A6XX_RBBM_SECVID_TSB_TRUSTED_BASE_LO, in hw_init()
902 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_TRUSTED_SIZE, 0x00000000); in hw_init()
905 gpu_write(gpu, REG_A6XX_CP_ADDR_MODE_CNTL, 0x1); in hw_init()
906 gpu_write(gpu, REG_A6XX_VSC_ADDR_MODE_CNTL, 0x1); in hw_init()
907 gpu_write(gpu, REG_A6XX_GRAS_ADDR_MODE_CNTL, 0x1); in hw_init()
908 gpu_write(gpu, REG_A6XX_RB_ADDR_MODE_CNTL, 0x1); in hw_init()
909 gpu_write(gpu, REG_A6XX_PC_ADDR_MODE_CNTL, 0x1); in hw_init()
910 gpu_write(gpu, REG_A6XX_HLSQ_ADDR_MODE_CNTL, 0x1); in hw_init()
911 gpu_write(gpu, REG_A6XX_VFD_ADDR_MODE_CNTL, 0x1); in hw_init()
912 gpu_write(gpu, REG_A6XX_VPC_ADDR_MODE_CNTL, 0x1); in hw_init()
913 gpu_write(gpu, REG_A6XX_UCHE_ADDR_MODE_CNTL, 0x1); in hw_init()
914 gpu_write(gpu, REG_A6XX_SP_ADDR_MODE_CNTL, 0x1); in hw_init()
915 gpu_write(gpu, REG_A6XX_TPL1_ADDR_MODE_CNTL, 0x1); in hw_init()
916 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_ADDR_MODE_CNTL, 0x1); in hw_init()
919 a6xx_set_hwcg(gpu, true); in hw_init()
924 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE0, 0x00071620); in hw_init()
925 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE1, 0x00071620); in hw_init()
926 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE2, 0x00071620); in hw_init()
927 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE3, 0x00071620); in hw_init()
928 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE3, 0x00071620); in hw_init()
929 gpu_write(gpu, REG_A6XX_RBBM_GBIF_CLIENT_QOS_CNTL, 0x3); in hw_init()
931 gpu_write(gpu, REG_A6XX_RBBM_VBIF_CLIENT_QOS_CNTL, 0x3); in hw_init()
935 gpu_write(gpu, REG_A6XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000009); in hw_init()
938 gpu_write(gpu, REG_A6XX_RBBM_PERFCTR_GPU_BUSY_MASKED, 0xffffffff); in hw_init()
941 gpu_write(gpu, REG_A6XX_UCHE_WRITE_RANGE_MAX_LO, 0xffffffc0); in hw_init()
942 gpu_write(gpu, REG_A6XX_UCHE_WRITE_RANGE_MAX_HI, 0x0001ffff); in hw_init()
943 gpu_write(gpu, REG_A6XX_UCHE_TRAP_BASE_LO, 0xfffff000); in hw_init()
944 gpu_write(gpu, REG_A6XX_UCHE_TRAP_BASE_HI, 0x0001ffff); in hw_init()
945 gpu_write(gpu, REG_A6XX_UCHE_WRITE_THRU_BASE_LO, 0xfffff000); in hw_init()
946 gpu_write(gpu, REG_A6XX_UCHE_WRITE_THRU_BASE_HI, 0x0001ffff); in hw_init()
950 gpu_write64(gpu, REG_A6XX_UCHE_GMEM_RANGE_MIN_LO, in hw_init()
953 gpu_write64(gpu, REG_A6XX_UCHE_GMEM_RANGE_MAX_LO, in hw_init()
958 gpu_write(gpu, REG_A6XX_UCHE_FILTER_CNTL, 0x804); in hw_init()
959 gpu_write(gpu, REG_A6XX_UCHE_CACHE_WAYS, 0x4); in hw_init()
963 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x02000140); in hw_init()
965 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x010000c0); in hw_init()
966 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c); in hw_init()
969 gpu_write(gpu, REG_A6XX_CP_LPAC_PROG_FIFO_SIZE, 0x00000020); in hw_init()
972 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 128); in hw_init()
978 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00300200); in hw_init()
980 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00200200); in hw_init()
982 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00300200); in hw_init()
984 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00180000); in hw_init()
987 gpu_write(gpu, REG_A6XX_CP_AHB_CNTL, 0x1); in hw_init()
990 gpu_write(gpu, REG_A6XX_RBBM_PERFCTR_CNTL, 0x1); in hw_init()
993 gpu_write(gpu, REG_A6XX_CP_PERFCTR_CP_SEL(0), PERF_CP_ALWAYS_COUNT); in hw_init()
995 a6xx_set_ubwc_config(gpu); in hw_init()
998 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, in hw_init()
1001 gpu_write(gpu, REG_A6XX_UCHE_CLIENT_PF, 1); in hw_init()
1005 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_0, 0); in hw_init()
1006 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_1, in hw_init()
1008 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_2, in hw_init()
1010 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_3, in hw_init()
1012 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_4, in hw_init()
1017 a6xx_set_cp_protect(gpu); in hw_init()
1020 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, 0x1); in hw_init()
1021 gpu_write(gpu, REG_A6XX_RBBM_GBIF_CLIENT_QOS_CNTL, 0x0); in hw_init()
1026 gpu_write(gpu, REG_A6XX_UCHE_CMDQ_CONFIG, 0x66906); in hw_init()
1029 if (gpu->hw_apriv) { in hw_init()
1030 gpu_write(gpu, REG_A6XX_CP_APRIV_CNTL, in hw_init()
1035 gpu_write(gpu, REG_A6XX_RBBM_INT_0_MASK, A6XX_INT_MASK); in hw_init()
1037 ret = adreno_hw_init(gpu); in hw_init()
1041 ret = a6xx_ucode_init(gpu); in hw_init()
1046 gpu_write64(gpu, REG_A6XX_CP_RB_BASE, REG_A6XX_CP_RB_BASE_HI, in hw_init()
1047 gpu->rb[0]->iova); in hw_init()
1054 gpu_write(gpu, REG_A6XX_CP_RB_CNTL, MSM_GPU_RB_CNTL_DEFAULT); in hw_init()
1056 gpu_write(gpu, REG_A6XX_CP_RB_CNTL, in hw_init()
1066 a6xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in hw_init()
1067 sizeof(u32) * gpu->nr_rings, in hw_init()
1069 gpu->aspace, &a6xx_gpu->shadow_bo, in hw_init()
1076 gpu_write64(gpu, REG_A6XX_CP_RB_RPTR_ADDR_LO, in hw_init()
1078 shadowptr(a6xx_gpu, gpu->rb[0])); in hw_init()
1082 a6xx_gpu->cur_ring = gpu->rb[0]; in hw_init()
1087 gpu_write(gpu, REG_A6XX_CP_SQE_CNTL, 1); in hw_init()
1089 ret = a6xx_cp_init(gpu); in hw_init()
1100 ret = a6xx_zap_shader_init(gpu); in hw_init()
1102 OUT_PKT7(gpu->rb[0], CP_SET_SECURE_MODE, 1); in hw_init()
1103 OUT_RING(gpu->rb[0], 0x00000000); in hw_init()
1105 a6xx_flush(gpu, gpu->rb[0]); in hw_init()
1106 if (!a6xx_idle(gpu, gpu->rb[0])) in hw_init()
1115 dev_warn_once(gpu->dev->dev, in hw_init()
1117 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TRUST_CNTL, 0x0); in hw_init()
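
hw_init() ends by trying to leave secure mode the sanctioned way, via the zap shader loaded through the secure world, and only falls back to clearing RBBM_SECVID_TRUST_CNTL directly when that path does not exist, hence the one-time warning. A sketch of the decision, assuming -ENODEV means "no zap firmware / no secure world":

    #include <errno.h>
    #include <stdio.h>

    static int zap_shader_load(void) { return -ENODEV; }   /* mock */
    static void reg_write(int reg, unsigned int val) { (void)reg; (void)val; }
    enum { SECVID_TRUST_CNTL };

    static int leave_secure_mode(void)
    {
        int ret = zap_shader_load();
        if (ret == 0)
            return 0;       /* zap path: CP_SET_SECURE_MODE goes on the ring */
        if (ret != -ENODEV)
            return ret;     /* real failure: propagate */

        fprintf(stderr, "no zap shader; clearing secure mode directly\n");
        reg_write(SECVID_TRUST_CNTL, 0x0);
        return 0;
    }
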
1138 static int a6xx_hw_init(struct msm_gpu *gpu) in a6xx_hw_init() argument
1140 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_hw_init()
1145 ret = hw_init(gpu); in a6xx_hw_init()
1151 static void a6xx_dump(struct msm_gpu *gpu) in a6xx_dump() argument
1153 DRM_DEV_INFO(&gpu->pdev->dev, "status: %08x\n", in a6xx_dump()
1154 gpu_read(gpu, REG_A6XX_RBBM_STATUS)); in a6xx_dump()
1155 adreno_dump(gpu); in a6xx_dump()
1161 static void a6xx_recover(struct msm_gpu *gpu) in a6xx_recover() argument
1163 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_recover()
1167 adreno_dump_info(gpu); in a6xx_recover()
1170 DRM_DEV_INFO(&gpu->pdev->dev, "CP_SCRATCH_REG%d: %u\n", i, in a6xx_recover()
1171 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(i))); in a6xx_recover()
1174 a6xx_dump(gpu); in a6xx_recover()
1182 gpu->funcs->pm_suspend(gpu); in a6xx_recover()
1183 gpu->funcs->pm_resume(gpu); in a6xx_recover()
1185 msm_gpu_hw_init(gpu); in a6xx_recover()
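
Recovery is deliberately blunt: log the CP scratch registers for post-mortem, power-cycle the core through the driver's own pm hooks, then run full hardware init again. In outline (a simplification of the real ordering, which also deals with fences and in-flight submits):

    static void dump_scratch_regs(void) {}  /* CP_SCRATCH_REGn: last GPU progress */
    static void pm_suspend(void) {}
    static void pm_resume(void) {}
    static void hw_reinit(void) {}          /* ring bases, ucode, CP init */

    static void recover(void)
    {
        dump_scratch_regs();
        pm_suspend();                       /* power-cycle clears the hang */
        pm_resume();
        hw_reinit();
    }
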
1188 static const char *a6xx_uche_fault_block(struct msm_gpu *gpu, u32 mid) in a6xx_uche_fault_block() argument
1202 val = gpu_read(gpu, REG_A6XX_UCHE_CLIENT_PF); in a6xx_uche_fault_block()
1216 static const char *a6xx_fault_block(struct msm_gpu *gpu, u32 id) in a6xx_fault_block() argument
1225 return a6xx_uche_fault_block(gpu, id); in a6xx_fault_block()
1234 struct msm_gpu *gpu = arg; in a6xx_fault_handler() local
1238 bool do_devcoredump = info && !READ_ONCE(gpu->crashstate); in a6xx_fault_handler()
1245 gpu->aspace->mmu->funcs->resume_translation(gpu->aspace->mmu); in a6xx_fault_handler()
1255 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(4)), in a6xx_fault_handler()
1256 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(5)), in a6xx_fault_handler()
1257 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(6)), in a6xx_fault_handler()
1258 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(7))); in a6xx_fault_handler()
1270 block = a6xx_fault_block(gpu, info->fsynr1 & 0xff); in a6xx_fault_handler()
1276 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(4)), in a6xx_fault_handler()
1277 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(5)), in a6xx_fault_handler()
1278 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(6)), in a6xx_fault_handler()
1279 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(7))); in a6xx_fault_handler()
1283 del_timer(&gpu->hangcheck_timer); in a6xx_fault_handler()
1285 gpu->fault_info.ttbr0 = info->ttbr0; in a6xx_fault_handler()
1286 gpu->fault_info.iova = iova; in a6xx_fault_handler()
1287 gpu->fault_info.flags = flags; in a6xx_fault_handler()
1288 gpu->fault_info.type = type; in a6xx_fault_handler()
1289 gpu->fault_info.block = block; in a6xx_fault_handler()
1291 kthread_queue_work(gpu->worker, &gpu->fault_work); in a6xx_fault_handler()
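
The IOMMU fault handler cannot do heavy recovery in its own context, so it snapshots the fault (ttbr0, iova, flags, faulting block) into gpu->fault_info, kills the hangcheck timer, and queues the rest onto a kthread worker. A sketch of that capture-then-defer shape, worker machinery mocked:

    #include <stdint.h>

    struct fault_info {
        uint64_t ttbr0, iova;
        int flags;
        const char *block;
    };

    static struct fault_info pending_fault;
    static void queue_work(void (*fn)(void)) { fn(); }  /* mock kthread_queue_work() */
    static void fault_worker(void) { /* devcoredump + recovery run here */ }

    static void on_fault(uint64_t ttbr0, uint64_t iova, int flags, const char *block)
    {
        pending_fault = (struct fault_info){ ttbr0, iova, flags, block };
        queue_work(fault_worker);           /* heavy lifting leaves the IRQ path */
    }
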
1297 static void a6xx_cp_hw_err_irq(struct msm_gpu *gpu) in a6xx_cp_hw_err_irq() argument
1299 u32 status = gpu_read(gpu, REG_A6XX_CP_INTERRUPT_STATUS); in a6xx_cp_hw_err_irq()
1304 gpu_write(gpu, REG_A6XX_CP_SQE_STAT_ADDR, 1); in a6xx_cp_hw_err_irq()
1305 val = gpu_read(gpu, REG_A6XX_CP_SQE_STAT_DATA); in a6xx_cp_hw_err_irq()
1306 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
1312 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
1316 dev_err_ratelimited(&gpu->pdev->dev, "CP | HW fault | status=0x%8.8X\n", in a6xx_cp_hw_err_irq()
1317 gpu_read(gpu, REG_A6XX_CP_HW_FAULT)); in a6xx_cp_hw_err_irq()
1320 u32 val = gpu_read(gpu, REG_A6XX_CP_PROTECT_STATUS); in a6xx_cp_hw_err_irq()
1322 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
1329 dev_err_ratelimited(&gpu->pdev->dev, "CP AHB error interrupt\n"); in a6xx_cp_hw_err_irq()
1332 dev_err_ratelimited(&gpu->pdev->dev, "CP VSD decoder parity error\n"); in a6xx_cp_hw_err_irq()
1335 dev_err_ratelimited(&gpu->pdev->dev, "CP illegal instruction error\n"); in a6xx_cp_hw_err_irq()
1339 static void a6xx_fault_detect_irq(struct msm_gpu *gpu) in a6xx_fault_detect_irq() argument
1341 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_fault_detect_irq()
1343 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); in a6xx_fault_detect_irq()
1351 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS3) & A6XX_RBBM_STATUS3_SMMU_STALLED_ON_FAULT) in a6xx_fault_detect_irq()
1360 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_fault_detect_irq()
1363 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_fault_detect_irq()
1364 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_fault_detect_irq()
1365 gpu_read(gpu, REG_A6XX_CP_RB_WPTR), in a6xx_fault_detect_irq()
1366 gpu_read64(gpu, REG_A6XX_CP_IB1_BASE, REG_A6XX_CP_IB1_BASE_HI), in a6xx_fault_detect_irq()
1367 gpu_read(gpu, REG_A6XX_CP_IB1_REM_SIZE), in a6xx_fault_detect_irq()
1368 gpu_read64(gpu, REG_A6XX_CP_IB2_BASE, REG_A6XX_CP_IB2_BASE_HI), in a6xx_fault_detect_irq()
1369 gpu_read(gpu, REG_A6XX_CP_IB2_REM_SIZE)); in a6xx_fault_detect_irq()
1372 del_timer(&gpu->hangcheck_timer); in a6xx_fault_detect_irq()
1374 kthread_queue_work(gpu->worker, &gpu->recover_work); in a6xx_fault_detect_irq()
1377 static irqreturn_t a6xx_irq(struct msm_gpu *gpu) in a6xx_irq() argument
1379 u32 status = gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS); in a6xx_irq()
1381 gpu_write(gpu, REG_A6XX_RBBM_INT_CLEAR_CMD, status); in a6xx_irq()
1384 a6xx_fault_detect_irq(gpu); in a6xx_irq()
1387 dev_err_ratelimited(&gpu->pdev->dev, "CP | AHB bus error\n"); in a6xx_irq()
1390 a6xx_cp_hw_err_irq(gpu); in a6xx_irq()
1393 dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB ASYNC overflow\n"); in a6xx_irq()
1396 dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB bus overflow\n"); in a6xx_irq()
1399 dev_err_ratelimited(&gpu->pdev->dev, "UCHE | Out of bounds access\n"); in a6xx_irq()
1402 msm_gpu_retire(gpu); in a6xx_irq()
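
a6xx_irq() is the classic read, ack, dispatch sequence: read RBBM_INT_0_STATUS, write the same bits to INT_CLEAR_CMD so no edge is lost, then branch per status bit, ending with retire work. A generic sketch with made-up bit assignments:

    #include <stdint.h>

    #define INT_HANG   (1u << 0)            /* hypothetical bit layout */
    #define INT_CP_ERR (1u << 1)
    #define INT_RETIRE (1u << 2)

    static uint32_t read_status(void) { return 0; }
    static void ack(uint32_t bits) { (void)bits; }
    static void handle_hang(void) {}
    static void handle_cp_err(void) {}
    static void retire(void) {}

    static void irq_handler(void)
    {
        uint32_t status = read_status();

        ack(status);                        /* clear first: no lost edges */
        if (status & INT_HANG)
            handle_hang();
        if (status & INT_CP_ERR)
            handle_cp_err();
        if (status & INT_RETIRE)
            retire();
    }
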
1426 struct msm_gpu *gpu = &adreno_gpu->base; in a6xx_llc_activate() local
1443 gpu_rmw(gpu, REG_A6XX_GBIF_SCACHE_CNTL0, (0x1f << 10) | in a6xx_llc_activate()
1480 gpu_rmw(gpu, REG_A6XX_GBIF_SCACHE_CNTL1, GENMASK(24, 0), cntl1_regval); in a6xx_llc_activate()
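
The LLC setup uses gpu_rmw(), a plain read-modify-write: clear the field's mask, OR in the new bits, write back. Since the helper's body is not in this listing, here is the assumed shape:

    #include <stdint.h>

    static uint32_t reg_read(int reg) { (void)reg; return 0; }
    static void reg_write(int reg, uint32_t v) { (void)reg; (void)v; }

    static void gpu_rmw_sketch(int reg, uint32_t mask, uint32_t bits)
    {
        uint32_t val = reg_read(reg);

        val &= ~mask;                       /* drop the field being replaced */
        val |= bits;                        /* install the new value */
        reg_write(reg, val);
    }
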
1515 static int a6xx_pm_resume(struct msm_gpu *gpu) in a6xx_pm_resume() argument
1517 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_pm_resume()
1521 gpu->needs_hw_init = true; in a6xx_pm_resume()
1531 msm_devfreq_resume(gpu); in a6xx_pm_resume()
1538 static int a6xx_pm_suspend(struct msm_gpu *gpu) in a6xx_pm_suspend() argument
1540 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_pm_suspend()
1548 msm_devfreq_suspend(gpu); in a6xx_pm_suspend()
1557 for (i = 0; i < gpu->nr_rings; i++) in a6xx_pm_suspend()
1563 static int a6xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a6xx_get_timestamp() argument
1565 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_timestamp()
1573 *value = gpu_read64(gpu, REG_A6XX_CP_ALWAYS_ON_COUNTER_LO, in a6xx_get_timestamp()
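
The timestamp is the always-on counter read as two 32-bit halves via gpu_read64(). A robust variant of a split read re-checks the high word to defend against a carry landing between the two accesses; the retry loop below is an illustration, not necessarily what gpu_read64() itself does:

    #include <stdint.h>

    static uint32_t reg_read(int reg) { (void)reg; return 0; }
    enum { CNT_LO, CNT_HI };

    static uint64_t read_counter64(void)
    {
        uint32_t hi, lo, hi2;

        do {
            hi  = reg_read(CNT_HI);
            lo  = reg_read(CNT_LO);
            hi2 = reg_read(CNT_HI);         /* changed? LO wrapped mid-read: retry */
        } while (hi != hi2);

        return ((uint64_t)hi << 32) | lo;
    }
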
1583 static struct msm_ringbuffer *a6xx_active_ring(struct msm_gpu *gpu) in a6xx_active_ring() argument
1585 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_active_ring()
1591 static void a6xx_destroy(struct msm_gpu *gpu) in a6xx_destroy() argument
1593 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_destroy()
1597 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_destroy()
1602 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->aspace); in a6xx_destroy()
1615 static unsigned long a6xx_gpu_busy(struct msm_gpu *gpu) in a6xx_gpu_busy() argument
1617 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_busy()
1630 busy_time = (busy_cycles - gpu->devfreq.busy_cycles) * 10; in a6xx_gpu_busy()
1633 gpu->devfreq.busy_cycles = busy_cycles; in a6xx_gpu_busy()
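
The busy calculation converts a delta of always-on counter cycles into time: the "* 10" in the listing pairs with a divide by 192 elsewhere in the driver, i.e. microseconds at a 19.2 MHz always-on clock (the rate is assumed here):

    #include <stdint.h>

    /* cycles at 19.2 MHz -> microseconds: us = cycles * 10 / 192 */
    static uint64_t busy_us(uint64_t now_cycles, uint64_t *last_cycles)
    {
        uint64_t us = (now_cycles - *last_cycles) * 10 / 192;

        *last_cycles = now_cycles;          /* baseline for the next sample */
        return us;
    }
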
1643 static void a6xx_gpu_set_freq(struct msm_gpu *gpu, struct dev_pm_opp *opp) in a6xx_gpu_set_freq() argument
1645 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_set_freq()
1649 a6xx_gmu_set_freq(gpu, opp); in a6xx_gpu_set_freq()
1654 a6xx_create_address_space(struct msm_gpu *gpu, struct platform_device *pdev) in a6xx_create_address_space() argument
1656 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_create_address_space()
1698 a6xx_create_private_address_space(struct msm_gpu *gpu) in a6xx_create_private_address_space() argument
1702 mmu = msm_iommu_pagetable_create(gpu->aspace->mmu); in a6xx_create_private_address_space()
1711 static uint32_t a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_get_rptr() argument
1713 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_rptr()
1719 return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR); in a6xx_get_rptr()
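
a6xx_get_rptr() prefers the shadow copy of the read pointer that the CP writes to memory (set up through update_shadow_rptr() and its CP_WHERE_AM_I packets) and only falls back to the MMIO RPTR register when no shadow exists, since register reads are far more expensive. A sketch of that choice, names assumed:

    #include <stdint.h>

    static uint32_t mmio_read_rptr(void) { return 0; }  /* slow MMIO fallback */

    struct ring {
        volatile uint32_t *shadow_rptr;     /* CP-written copy in system memory */
    };

    static uint32_t get_rptr(struct ring *r)
    {
        if (r->shadow_rptr)
            return *r->shadow_rptr;         /* cheap cached read */
        return mmio_read_rptr();
    }
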
1818 struct msm_gpu *gpu; in a6xx_gpu_init() local
1826 gpu = &adreno_gpu->base; in a6xx_gpu_init()
1846 gpu->clamp_to_idle = true; in a6xx_gpu_init()
1874 if (gpu->aspace) in a6xx_gpu_init()
1875 msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, in a6xx_gpu_init()
1878 return gpu; in a6xx_gpu_init()