Lines matching "gpu" in drivers/gpu/drm/msm/adreno/a6xx_gpu.c
15 static inline bool _a6xx_check_idle(struct msm_gpu *gpu) in _a6xx_check_idle() argument
17 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in _a6xx_check_idle()
25 if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) & in _a6xx_check_idle()
29 return !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) & in _a6xx_check_idle()
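Only fragments of the idle check match above. A plausible reconstruction of the whole helper, assuming the GMU idle helper a6xx_gmu_isidle() and the CX-master/hang-detect mask names from the a6xx register headers:

static inline bool _a6xx_check_idle(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        /* The GMU must be idle before the GPU can be called idle */
        if (!a6xx_gmu_isidle(&a6xx_gpu->gmu))
                return false;

        /* All RBBM status bits except the CX-master AHB bit must be clear */
        if (gpu_read(gpu, REG_A6XX_RBBM_STATUS) &
                        ~A6XX_RBBM_STATUS_CP_AHB_BUSY_CX_MASTER)
                return false;

        /* Finally, no hang-detect interrupt may be pending */
        return !(gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS) &
                A6XX_RBBM_INT_0_MASK_RBBM_HANG_DETECT);
}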
33 bool a6xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_idle() argument
36 if (!adreno_idle(gpu, ring)) in a6xx_idle()
39 if (spin_until(_a6xx_check_idle(gpu))) { in a6xx_idle()
40 DRM_ERROR("%s: %ps: timeout waiting for GPU to idle: status %8.8X irq %8.8X rptr/wptr %d/%d\n", in a6xx_idle()
41 gpu->name, __builtin_return_address(0), in a6xx_idle()
42 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_idle()
43 gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS), in a6xx_idle()
44 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_idle()
45 gpu_read(gpu, REG_A6XX_CP_RB_WPTR)); in a6xx_idle()
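Assembled from the matched lines, a6xx_idle() first drains the ring through adreno_idle() and then polls the check above; a sketch, assuming spin_until() returns nonzero on timeout:

bool a6xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
        /* Wait for the CP to drain the ringbuffer first */
        if (!adreno_idle(gpu, ring))
                return false;

        if (spin_until(_a6xx_check_idle(gpu))) {
                DRM_ERROR("%s: %ps: timeout waiting for GPU to idle: status %8.8X irq %8.8X rptr/wptr %d/%d\n",
                        gpu->name, __builtin_return_address(0),
                        gpu_read(gpu, REG_A6XX_RBBM_STATUS),
                        gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS),
                        gpu_read(gpu, REG_A6XX_CP_RB_RPTR),
                        gpu_read(gpu, REG_A6XX_CP_RB_WPTR));
                return false;
        }

        return true;
}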
52 static void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_flush() argument
54 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_flush()
81 gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr); in a6xx_flush()
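The flush path ends with the WPTR write at line 81. A minimal sketch of the function around it, assuming the driver's get_wptr() helper and the per-ring lock; the CP_WHERE_AM_I emission that uses adreno_gpu (line 54) is elided:

static void a6xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
        uint32_t wptr;
        unsigned long flags;

        spin_lock_irqsave(&ring->lock, flags);

        /* Copy the shadow to the actual register */
        ring->cur = ring->next;

        /* Make sure to wrap wptr if we need to */
        wptr = get_wptr(ring);

        spin_unlock_irqrestore(&ring->lock, flags);

        /* Make sure everything is posted before kicking the hardware */
        mb();

        gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr);
}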
129 * lingering in that part of the GPU in a6xx_set_pagetable()
138 static void a6xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a6xx_submit() argument
141 struct msm_drm_private *priv = gpu->dev->dev_private; in a6xx_submit()
142 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_submit()
154 * GPU registers so we need to add 0x1a800 to the register value on A630 in a6xx_submit()
209 a6xx_flush(gpu, ring); in a6xx_submit()
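The comment at line 154 concerns sampling the GMU always-on counter from PM4, which on A630 needs the 0x1a800 offset because the GMU registers sit above the GPU register base. The submission itself ends by writing the fence and kicking the ring; a sketch of the tail, assuming rbmemptr() resolves a ring memptr to an iova:

        /* Write the fence and trigger a CACHE_FLUSH_TS event so
         * retirement can be signalled */
        OUT_PKT7(ring, CP_EVENT_WRITE, 4);
        OUT_RING(ring, CACHE_FLUSH_TS | (1 << 31));
        OUT_RING(ring, lower_32_bits(rbmemptr(ring, fence)));
        OUT_RING(ring, upper_32_bits(rbmemptr(ring, fence)));
        OUT_RING(ring, submit->seqno);

        a6xx_flush(gpu, ring);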
427 static void a6xx_set_hwcg(struct msm_gpu *gpu, bool state) in a6xx_set_hwcg() argument
429 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_hwcg()
444 val = gpu_read(gpu, REG_A6XX_RBBM_CLOCK_CNTL); in a6xx_set_hwcg()
454 gpu_write(gpu, reg->offset, state ? reg->value : 0); in a6xx_set_hwcg()
459 gpu_write(gpu, REG_A6XX_RBBM_CLOCK_CNTL, state ? clock_cntl_on : 0); in a6xx_set_hwcg()
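The clock-gating toggle walks a per-chip register list; a sketch, assuming the adreno_reglist table in adreno_gpu->info->hwcg, the gmu_rmw() accessor, and per-chip RBBM_CLOCK_CNTL enable values (0x8aa8aa02 on A630):

static void a6xx_set_hwcg(struct msm_gpu *gpu, bool state)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
        struct a6xx_gmu *gmu = &a6xx_gpu->gmu;
        const struct adreno_reglist *reg;
        unsigned int i;
        u32 val, clock_cntl_on;

        if (!adreno_gpu->info->hwcg)
                return;

        /* Enable value for RBBM_CLOCK_CNTL differs per chip */
        clock_cntl_on = adreno_is_a630(adreno_gpu) ? 0x8aa8aa02 : 0x8aa8aa82;

        val = gpu_read(gpu, REG_A6XX_RBBM_CLOCK_CNTL);

        /* Don't re-program the registers if they are already correct */
        if ((!state && !val) || (state && (val == clock_cntl_on)))
                return;

        /* Disable the SP clock before programming HWCG registers */
        gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0);

        for (i = 0; (reg = &adreno_gpu->info->hwcg[i], reg->offset); i++)
                gpu_write(gpu, reg->offset, state ? reg->value : 0);

        /* Re-enable the SP clock, then latch the global enable */
        gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 0, 1);

        gpu_write(gpu, REG_A6XX_RBBM_CLOCK_CNTL, state ? clock_cntl_on : 0);
}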
462 static void a6xx_set_ubwc_config(struct msm_gpu *gpu) in a6xx_set_ubwc_config() argument
464 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_set_ubwc_config()
485 gpu_write(gpu, REG_A6XX_RB_NC_MODE_CNTL, in a6xx_set_ubwc_config()
487 gpu_write(gpu, REG_A6XX_TPL1_NC_MODE_CNTL, lower_bit << 1); in a6xx_set_ubwc_config()
488 gpu_write(gpu, REG_A6XX_SP_NC_MODE_CNTL, in a6xx_set_ubwc_config()
490 gpu_write(gpu, REG_A6XX_UCHE_MODE_CNTL, lower_bit << 21); in a6xx_set_ubwc_config()
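The shifted fields written at lines 485-490 come from a handful of per-chip UBWC parameters; a sketch, assuming the default values and the A650 overrides:

static void a6xx_set_ubwc_config(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        /* Defaults; later chips flip these for different UBWC modes */
        u32 lower_bit = 2;
        u32 amsbc = 0;
        u32 rgb565_predicator = 0;
        u32 uavflagprd_inv = 0;

        if (adreno_is_a650(adreno_gpu)) {
                lower_bit = 3;
                amsbc = 1;
                rgb565_predicator = 1;
                uavflagprd_inv = 2;
        }

        gpu_write(gpu, REG_A6XX_RB_NC_MODE_CNTL,
                rgb565_predicator << 11 | amsbc << 4 | lower_bit << 1);
        gpu_write(gpu, REG_A6XX_TPL1_NC_MODE_CNTL, lower_bit << 1);
        gpu_write(gpu, REG_A6XX_SP_NC_MODE_CNTL,
                uavflagprd_inv << 4 | lower_bit << 1);
        gpu_write(gpu, REG_A6XX_UCHE_MODE_CNTL, lower_bit << 21);
}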
493 static int a6xx_cp_init(struct msm_gpu *gpu) in a6xx_cp_init() argument
495 struct msm_ringbuffer *ring = gpu->rb[0]; in a6xx_cp_init()
518 a6xx_flush(gpu, ring); in a6xx_cp_init()
519 return a6xx_idle(gpu, ring) ? 0 : -EINVAL; in a6xx_cp_init()
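CP init is a single CP_ME_INIT packet followed by the flush/idle pair at lines 518-519; a sketch, where the exact payload dwords (ordinal, context and error-detection enables) are an assumption about this driver's CP_ME_INIT configuration:

static int a6xx_cp_init(struct msm_gpu *gpu)
{
        struct msm_ringbuffer *ring = gpu->rb[0];

        OUT_PKT7(ring, CP_ME_INIT, 8);

        OUT_RING(ring, 0x0000002f);
        /* Enable multiple hardware contexts */
        OUT_RING(ring, 0x00000003);
        /* Enable error detection */
        OUT_RING(ring, 0x20000000);
        /* Don't enable header dump */
        OUT_RING(ring, 0x00000000);
        OUT_RING(ring, 0x00000000);
        /* No workarounds enabled; pad the rest with zeros */
        OUT_RING(ring, 0x00000000);
        OUT_RING(ring, 0x00000000);
        OUT_RING(ring, 0x00000000);

        a6xx_flush(gpu, ring);
        return a6xx_idle(gpu, ring) ? 0 : -EINVAL;
}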
546 static int a6xx_ucode_init(struct msm_gpu *gpu) in a6xx_ucode_init() argument
548 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_ucode_init()
552 a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu, in a6xx_ucode_init()
559 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_ucode_init()
569 gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE_LO, in a6xx_ucode_init()
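Ucode init allocates a BO for the SQE firmware once and programs its iova; a sketch, assuming adreno_fw_create_bo() and the ADRENO_FW_SQE firmware slot:

static int a6xx_ucode_init(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        if (!a6xx_gpu->sqe_bo) {
                a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu,
                        adreno_gpu->fw[ADRENO_FW_SQE], &a6xx_gpu->sqe_iova);

                if (IS_ERR(a6xx_gpu->sqe_bo)) {
                        int ret = PTR_ERR(a6xx_gpu->sqe_bo);

                        a6xx_gpu->sqe_bo = NULL;
                        DRM_DEV_ERROR(&gpu->pdev->dev,
                                "Could not allocate SQE ucode: %d\n", ret);
                        return ret;
                }

                msm_gem_object_set_name(a6xx_gpu->sqe_bo, "sqefw");
        }

        /* Point the CP's SQE instruction base at the firmware BO */
        gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE_LO,
                REG_A6XX_CP_SQE_INSTR_BASE_HI, a6xx_gpu->sqe_iova);

        return 0;
}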
575 static int a6xx_zap_shader_init(struct msm_gpu *gpu) in a6xx_zap_shader_init() argument
583 ret = adreno_zap_shader_load(gpu, GPU_PAS_ID); in a6xx_zap_shader_init()
601 static int a6xx_hw_init(struct msm_gpu *gpu) in a6xx_hw_init() argument
603 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_hw_init()
607 /* Make sure the GMU keeps the GPU on while we set it up */ in a6xx_hw_init()
610 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_CNTL, 0); in a6xx_hw_init()
617 gpu_write64(gpu, REG_A6XX_RBBM_SECVID_TSB_TRUSTED_BASE_LO, in a6xx_hw_init()
619 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_TRUSTED_SIZE, 0x00000000); in a6xx_hw_init()
622 gpu_write(gpu, REG_A6XX_CP_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
623 gpu_write(gpu, REG_A6XX_VSC_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
624 gpu_write(gpu, REG_A6XX_GRAS_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
625 gpu_write(gpu, REG_A6XX_RB_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
626 gpu_write(gpu, REG_A6XX_PC_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
627 gpu_write(gpu, REG_A6XX_HLSQ_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
628 gpu_write(gpu, REG_A6XX_VFD_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
629 gpu_write(gpu, REG_A6XX_VPC_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
630 gpu_write(gpu, REG_A6XX_UCHE_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
631 gpu_write(gpu, REG_A6XX_SP_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
632 gpu_write(gpu, REG_A6XX_TPL1_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
633 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TSB_ADDR_MODE_CNTL, 0x1); in a6xx_hw_init()
636 a6xx_set_hwcg(gpu, true); in a6xx_hw_init()
640 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE0, 0x00071620); in a6xx_hw_init()
641 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE1, 0x00071620); in a6xx_hw_init()
642 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE2, 0x00071620); in a6xx_hw_init()
643 gpu_write(gpu, REG_A6XX_GBIF_QSB_SIDE3, 0x00071620); in a6xx_hw_init()
645 gpu_write(gpu, REG_A6XX_RBBM_GBIF_CLIENT_QOS_CNTL, 0x3); in a6xx_hw_init()
647 gpu_write(gpu, REG_A6XX_RBBM_VBIF_CLIENT_QOS_CNTL, 0x3); in a6xx_hw_init()
651 gpu_write(gpu, REG_A6XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000009); in a6xx_hw_init()
653 /* Make all blocks contribute to the GPU BUSY perf counter */ in a6xx_hw_init()
654 gpu_write(gpu, REG_A6XX_RBBM_PERFCTR_GPU_BUSY_MASKED, 0xffffffff); in a6xx_hw_init()
657 gpu_write(gpu, REG_A6XX_UCHE_WRITE_RANGE_MAX_LO, 0xffffffc0); in a6xx_hw_init()
658 gpu_write(gpu, REG_A6XX_UCHE_WRITE_RANGE_MAX_HI, 0x0001ffff); in a6xx_hw_init()
659 gpu_write(gpu, REG_A6XX_UCHE_TRAP_BASE_LO, 0xfffff000); in a6xx_hw_init()
660 gpu_write(gpu, REG_A6XX_UCHE_TRAP_BASE_HI, 0x0001ffff); in a6xx_hw_init()
661 gpu_write(gpu, REG_A6XX_UCHE_WRITE_THRU_BASE_LO, 0xfffff000); in a6xx_hw_init()
662 gpu_write(gpu, REG_A6XX_UCHE_WRITE_THRU_BASE_HI, 0x0001ffff); in a6xx_hw_init()
665 /* Set the GMEM VA range [0x100000:0x100000 + gpu->gmem - 1] */ in a6xx_hw_init()
666 gpu_write64(gpu, REG_A6XX_UCHE_GMEM_RANGE_MIN_LO, in a6xx_hw_init()
669 gpu_write64(gpu, REG_A6XX_UCHE_GMEM_RANGE_MAX_LO, in a6xx_hw_init()
674 gpu_write(gpu, REG_A6XX_UCHE_FILTER_CNTL, 0x804); in a6xx_hw_init()
675 gpu_write(gpu, REG_A6XX_UCHE_CACHE_WAYS, 0x4); in a6xx_hw_init()
678 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x02000140); in a6xx_hw_init()
680 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2, 0x010000c0); in a6xx_hw_init()
681 gpu_write(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_1, 0x8040362c); in a6xx_hw_init()
684 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 128); in a6xx_hw_init()
688 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00300000); in a6xx_hw_init()
690 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, 0x00200000); in a6xx_hw_init()
692 gpu_write(gpu, REG_A6XX_PC_DBG_ECO_CNTL, (0x300 << 11)); in a6xx_hw_init()
695 gpu_write(gpu, REG_A6XX_CP_AHB_CNTL, 0x1); in a6xx_hw_init()
698 gpu_write(gpu, REG_A6XX_RBBM_PERFCTR_CNTL, 0x1); in a6xx_hw_init()
701 gpu_write(gpu, REG_A6XX_CP_PERFCTR_CP_SEL_0, PERF_CP_ALWAYS_COUNT); in a6xx_hw_init()
703 a6xx_set_ubwc_config(gpu); in a6xx_hw_init()
706 gpu_write(gpu, REG_A6XX_RBBM_INTERFACE_HANG_INT_CNTL, in a6xx_hw_init()
709 gpu_write(gpu, REG_A6XX_UCHE_CLIENT_PF, 1); in a6xx_hw_init()
713 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_0, 0); in a6xx_hw_init()
714 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_1, in a6xx_hw_init()
716 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_2, in a6xx_hw_init()
718 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_3, in a6xx_hw_init()
720 gpu_write(gpu, REG_A6XX_TPL1_BICUBIC_WEIGHTS_TABLE_4, in a6xx_hw_init()
725 gpu_write(gpu, REG_A6XX_CP_PROTECT_CNTL, 0x00000003); in a6xx_hw_init()
727 gpu_write(gpu, REG_A6XX_CP_PROTECT(0), in a6xx_hw_init()
729 gpu_write(gpu, REG_A6XX_CP_PROTECT(1), A6XX_PROTECT_RW(0xae50, 0x2)); in a6xx_hw_init()
730 gpu_write(gpu, REG_A6XX_CP_PROTECT(2), A6XX_PROTECT_RW(0x9624, 0x13)); in a6xx_hw_init()
731 gpu_write(gpu, REG_A6XX_CP_PROTECT(3), A6XX_PROTECT_RW(0x8630, 0x8)); in a6xx_hw_init()
732 gpu_write(gpu, REG_A6XX_CP_PROTECT(4), A6XX_PROTECT_RW(0x9e70, 0x1)); in a6xx_hw_init()
733 gpu_write(gpu, REG_A6XX_CP_PROTECT(5), A6XX_PROTECT_RW(0x9e78, 0x187)); in a6xx_hw_init()
734 gpu_write(gpu, REG_A6XX_CP_PROTECT(6), A6XX_PROTECT_RW(0xf000, 0x810)); in a6xx_hw_init()
735 gpu_write(gpu, REG_A6XX_CP_PROTECT(7), in a6xx_hw_init()
737 gpu_write(gpu, REG_A6XX_CP_PROTECT(8), A6XX_PROTECT_RW(0x50e, 0x0)); in a6xx_hw_init()
738 gpu_write(gpu, REG_A6XX_CP_PROTECT(9), A6XX_PROTECT_RDONLY(0x50f, 0x0)); in a6xx_hw_init()
739 gpu_write(gpu, REG_A6XX_CP_PROTECT(10), A6XX_PROTECT_RW(0x510, 0x0)); in a6xx_hw_init()
740 gpu_write(gpu, REG_A6XX_CP_PROTECT(11), in a6xx_hw_init()
742 gpu_write(gpu, REG_A6XX_CP_PROTECT(12), in a6xx_hw_init()
744 gpu_write(gpu, REG_A6XX_CP_PROTECT(13), in a6xx_hw_init()
746 gpu_write(gpu, REG_A6XX_CP_PROTECT(14), A6XX_PROTECT_RW(0xe00, 0xe)); in a6xx_hw_init()
747 gpu_write(gpu, REG_A6XX_CP_PROTECT(15), A6XX_PROTECT_RW(0x8e00, 0x0)); in a6xx_hw_init()
748 gpu_write(gpu, REG_A6XX_CP_PROTECT(16), A6XX_PROTECT_RW(0x8e50, 0xf)); in a6xx_hw_init()
749 gpu_write(gpu, REG_A6XX_CP_PROTECT(17), A6XX_PROTECT_RW(0xbe02, 0x0)); in a6xx_hw_init()
750 gpu_write(gpu, REG_A6XX_CP_PROTECT(18), in a6xx_hw_init()
752 gpu_write(gpu, REG_A6XX_CP_PROTECT(19), A6XX_PROTECT_RW(0x800, 0x82)); in a6xx_hw_init()
753 gpu_write(gpu, REG_A6XX_CP_PROTECT(20), A6XX_PROTECT_RW(0x8a0, 0x8)); in a6xx_hw_init()
754 gpu_write(gpu, REG_A6XX_CP_PROTECT(21), A6XX_PROTECT_RW(0x8ab, 0x19)); in a6xx_hw_init()
755 gpu_write(gpu, REG_A6XX_CP_PROTECT(22), A6XX_PROTECT_RW(0x900, 0x4d)); in a6xx_hw_init()
756 gpu_write(gpu, REG_A6XX_CP_PROTECT(23), A6XX_PROTECT_RW(0x98d, 0x76)); in a6xx_hw_init()
757 gpu_write(gpu, REG_A6XX_CP_PROTECT(24), in a6xx_hw_init()
759 gpu_write(gpu, REG_A6XX_CP_PROTECT(25), A6XX_PROTECT_RW(0xa630, 0x0)); in a6xx_hw_init()
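The CP_PROTECT values above are packed with two helper macros; a sketch of the encoding, assuming the field layout from the a6xx headers (bit 31 also blocks reads, 14-bit range length at bit 18, 18-bit base register):

/* Block both reads and writes for _len registers starting at _reg */
#define A6XX_PROTECT_RW(_reg, _len) \
        ((1 << 31) | \
        (((_len) & 0x3FFF) << 18) | ((_reg) & 0x3FFFF))

/* Same, but allow reads over the range (useful for perf counters) */
#define A6XX_PROTECT_RDONLY(_reg, _len) \
        ((((_len) & 0x3FFF) << 18) | ((_reg) & 0x3FFFF))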
762 if (gpu->hw_apriv) { in a6xx_hw_init()
763 gpu_write(gpu, REG_A6XX_CP_APRIV_CNTL, in a6xx_hw_init()
768 gpu_write(gpu, REG_A6XX_RBBM_INT_0_MASK, A6XX_INT_MASK); in a6xx_hw_init()
770 ret = adreno_hw_init(gpu); in a6xx_hw_init()
774 ret = a6xx_ucode_init(gpu); in a6xx_hw_init()
779 gpu_write64(gpu, REG_A6XX_CP_RB_BASE, REG_A6XX_CP_RB_BASE_HI, in a6xx_hw_init()
780 gpu->rb[0]->iova); in a6xx_hw_init()
787 gpu_write(gpu, REG_A6XX_CP_RB_CNTL, MSM_GPU_RB_CNTL_DEFAULT); in a6xx_hw_init()
789 gpu_write(gpu, REG_A6XX_CP_RB_CNTL, in a6xx_hw_init()
799 a6xx_gpu->shadow = msm_gem_kernel_new_locked(gpu->dev, in a6xx_hw_init()
800 sizeof(u32) * gpu->nr_rings, in a6xx_hw_init()
802 gpu->aspace, &a6xx_gpu->shadow_bo, in a6xx_hw_init()
809 gpu_write64(gpu, REG_A6XX_CP_RB_RPTR_ADDR_LO, in a6xx_hw_init()
811 shadowptr(a6xx_gpu, gpu->rb[0])); in a6xx_hw_init()
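shadowptr() at line 811 points CP_RB_RPTR_ADDR at this ring's slot in the shadow buffer allocated just above; a plausible definition, assuming one u32 slot per ring:

#define shadowptr(_a6xx_gpu, _ring) ((_a6xx_gpu)->shadow_iova + \
                ((_ring)->id * sizeof(uint32_t)))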
815 a6xx_gpu->cur_ring = gpu->rb[0]; in a6xx_hw_init()
820 gpu_write(gpu, REG_A6XX_CP_SQE_CNTL, 1); in a6xx_hw_init()
822 ret = a6xx_cp_init(gpu); in a6xx_hw_init()
833 ret = a6xx_zap_shader_init(gpu); in a6xx_hw_init()
835 OUT_PKT7(gpu->rb[0], CP_SET_SECURE_MODE, 1); in a6xx_hw_init()
836 OUT_RING(gpu->rb[0], 0x00000000); in a6xx_hw_init()
838 a6xx_flush(gpu, gpu->rb[0]); in a6xx_hw_init()
839 if (!a6xx_idle(gpu, gpu->rb[0])) in a6xx_hw_init()
848 dev_warn_once(gpu->dev->dev, in a6xx_hw_init()
850 gpu_write(gpu, REG_A6XX_RBBM_SECVID_TRUST_CNTL, 0x0); in a6xx_hw_init()
858 * Tell the GMU that we are done touching the GPU and it can start power in a6xx_hw_init()
871 static void a6xx_dump(struct msm_gpu *gpu) in a6xx_dump() argument
873 DRM_DEV_INFO(&gpu->pdev->dev, "status: %08x\n", in a6xx_dump()
874 gpu_read(gpu, REG_A6XX_RBBM_STATUS)); in a6xx_dump()
875 adreno_dump(gpu); in a6xx_dump()
881 static void a6xx_recover(struct msm_gpu *gpu) in a6xx_recover() argument
883 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_recover()
887 adreno_dump_info(gpu); in a6xx_recover()
890 DRM_DEV_INFO(&gpu->pdev->dev, "CP_SCRATCH_REG%d: %u\n", i, in a6xx_recover()
891 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(i))); in a6xx_recover()
894 a6xx_dump(gpu); in a6xx_recover()
902 gpu->funcs->pm_suspend(gpu); in a6xx_recover()
903 gpu->funcs->pm_resume(gpu); in a6xx_recover()
905 msm_gpu_hw_init(gpu); in a6xx_recover()
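Recovery dumps state, drops any GMU keep-alive vote, and bounces power before re-running hw_init; a sketch, assuming the hang_debug module parameter and the gmu_write() accessor:

static void a6xx_recover(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
        int i;

        adreno_dump_info(gpu);

        for (i = 0; i < 8; i++)
                DRM_DEV_INFO(&gpu->pdev->dev, "CP_SCRATCH_REG%d: %u\n", i,
                        gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(i)));

        if (hang_debug)
                a6xx_dump(gpu);

        /* Turn off any keep-alive left enabled by the hang interrupt */
        gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 0);

        /* Power-cycle the GPU, then bring it back up from scratch */
        gpu->funcs->pm_suspend(gpu);
        gpu->funcs->pm_resume(gpu);

        msm_gpu_hw_init(gpu);
}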
910 struct msm_gpu *gpu = arg; in a6xx_fault_handler() local
912 pr_warn_ratelimited("*** gpu fault: iova=%08lx, flags=%d (%u,%u,%u,%u)\n", in a6xx_fault_handler()
914 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(4)), in a6xx_fault_handler()
915 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(5)), in a6xx_fault_handler()
916 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(6)), in a6xx_fault_handler()
917 gpu_read(gpu, REG_A6XX_CP_SCRATCH_REG(7))); in a6xx_fault_handler()
922 static void a6xx_cp_hw_err_irq(struct msm_gpu *gpu) in a6xx_cp_hw_err_irq() argument
924 u32 status = gpu_read(gpu, REG_A6XX_CP_INTERRUPT_STATUS); in a6xx_cp_hw_err_irq()
929 gpu_write(gpu, REG_A6XX_CP_SQE_STAT_ADDR, 1); in a6xx_cp_hw_err_irq()
930 val = gpu_read(gpu, REG_A6XX_CP_SQE_STAT_DATA); in a6xx_cp_hw_err_irq()
931 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
937 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
941 dev_err_ratelimited(&gpu->pdev->dev, "CP | HW fault | status=0x%8.8X\n", in a6xx_cp_hw_err_irq()
942 gpu_read(gpu, REG_A6XX_CP_HW_FAULT)); in a6xx_cp_hw_err_irq()
945 u32 val = gpu_read(gpu, REG_A6XX_CP_PROTECT_STATUS); in a6xx_cp_hw_err_irq()
947 dev_err_ratelimited(&gpu->pdev->dev, in a6xx_cp_hw_err_irq()
954 dev_err_ratelimited(&gpu->pdev->dev, "CP AHB error interrupt\n"); in a6xx_cp_hw_err_irq()
957 dev_err_ratelimited(&gpu->pdev->dev, "CP VSD decoder parity error\n"); in a6xx_cp_hw_err_irq()
960 dev_err_ratelimited(&gpu->pdev->dev, "CP illegal instruction error\n"); in a6xx_cp_hw_err_irq()
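The two-step access at lines 929-930 selects an SQE stat slot and reads it back to recover the faulting opcode; a sketch of that branch, assuming the A6XX_CP_INT_CP_OPCODE_ERROR bit name:

        if (status & A6XX_CP_INT_CP_OPCODE_ERROR) {
                u32 val;

                /* Select the SQE stat slot that holds the bad opcode... */
                gpu_write(gpu, REG_A6XX_CP_SQE_STAT_ADDR, 1);
                /* ...and read it back */
                val = gpu_read(gpu, REG_A6XX_CP_SQE_STAT_DATA);
                dev_err_ratelimited(&gpu->pdev->dev,
                        "CP | opcode error | possible opcode=0x%8.8X\n", val);
        }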
964 static void a6xx_fault_detect_irq(struct msm_gpu *gpu) in a6xx_fault_detect_irq() argument
966 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_fault_detect_irq()
968 struct drm_device *dev = gpu->dev; in a6xx_fault_detect_irq()
970 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); in a6xx_fault_detect_irq()
973 * Force the GPU to stay on until after we finish in a6xx_fault_detect_irq()
978 DRM_DEV_ERROR(&gpu->pdev->dev, in a6xx_fault_detect_irq()
979 …"gpu fault ring %d fence %x status %8.8X rb %4.4x/%4.4x ib1 %16.16llX/%4.4x ib2 %16.16llX/%4.4x\n", in a6xx_fault_detect_irq()
981 gpu_read(gpu, REG_A6XX_RBBM_STATUS), in a6xx_fault_detect_irq()
982 gpu_read(gpu, REG_A6XX_CP_RB_RPTR), in a6xx_fault_detect_irq()
983 gpu_read(gpu, REG_A6XX_CP_RB_WPTR), in a6xx_fault_detect_irq()
984 gpu_read64(gpu, REG_A6XX_CP_IB1_BASE, REG_A6XX_CP_IB1_BASE_HI), in a6xx_fault_detect_irq()
985 gpu_read(gpu, REG_A6XX_CP_IB1_REM_SIZE), in a6xx_fault_detect_irq()
986 gpu_read64(gpu, REG_A6XX_CP_IB2_BASE, REG_A6XX_CP_IB2_BASE_HI), in a6xx_fault_detect_irq()
987 gpu_read(gpu, REG_A6XX_CP_IB2_REM_SIZE)); in a6xx_fault_detect_irq()
990 del_timer(&gpu->hangcheck_timer); in a6xx_fault_detect_irq()
992 queue_work(priv->wq, &gpu->recover_work); in a6xx_fault_detect_irq()
995 static irqreturn_t a6xx_irq(struct msm_gpu *gpu) in a6xx_irq() argument
997 u32 status = gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS); in a6xx_irq()
999 gpu_write(gpu, REG_A6XX_RBBM_INT_CLEAR_CMD, status); in a6xx_irq()
1002 a6xx_fault_detect_irq(gpu); in a6xx_irq()
1005 dev_err_ratelimited(&gpu->pdev->dev, "CP | AHB bus error\n"); in a6xx_irq()
1008 a6xx_cp_hw_err_irq(gpu); in a6xx_irq()
1011 dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB ASYNC overflow\n"); in a6xx_irq()
1014 dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB bus overflow\n"); in a6xx_irq()
1017 dev_err_ratelimited(&gpu->pdev->dev, "UCHE | Out of bounds access\n"); in a6xx_irq()
1020 msm_gpu_retire(gpu); in a6xx_irq()
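The IRQ handler acks everything up front (line 999) and then dispatches on the status bits; a reconstruction, assuming the A6XX_RBBM_INT_0_MASK_* bit names:

static irqreturn_t a6xx_irq(struct msm_gpu *gpu)
{
        u32 status = gpu_read(gpu, REG_A6XX_RBBM_INT_0_STATUS);

        /* Ack everything we are about to handle */
        gpu_write(gpu, REG_A6XX_RBBM_INT_CLEAR_CMD, status);

        if (status & A6XX_RBBM_INT_0_MASK_RBBM_HANG_DETECT)
                a6xx_fault_detect_irq(gpu);

        if (status & A6XX_RBBM_INT_0_MASK_CP_AHB_ERROR)
                dev_err_ratelimited(&gpu->pdev->dev, "CP | AHB bus error\n");

        if (status & A6XX_RBBM_INT_0_MASK_CP_HW_ERROR)
                a6xx_cp_hw_err_irq(gpu);

        if (status & A6XX_RBBM_INT_0_MASK_RBBM_ATB_ASYNCFIFO_OVERFLOW)
                dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB ASYNC overflow\n");

        if (status & A6XX_RBBM_INT_0_MASK_RBBM_ATB_BUS_OVERFLOW)
                dev_err_ratelimited(&gpu->pdev->dev, "RBBM | ATB bus overflow\n");

        if (status & A6XX_RBBM_INT_0_MASK_UCHE_OOB_ACCESS)
                dev_err_ratelimited(&gpu->pdev->dev, "UCHE | Out of bounds access\n");

        /* Retire completed submits on the cache-flush timestamp event */
        if (status & A6XX_RBBM_INT_0_MASK_CP_CACHE_FLUSH_TS)
                msm_gpu_retire(gpu);

        return IRQ_HANDLED;
}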
1025 static int a6xx_pm_resume(struct msm_gpu *gpu) in a6xx_pm_resume() argument
1027 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_pm_resume()
1031 gpu->needs_hw_init = true; in a6xx_pm_resume()
1039 msm_gpu_resume_devfreq(gpu); in a6xx_pm_resume()
1044 static int a6xx_pm_suspend(struct msm_gpu *gpu) in a6xx_pm_suspend() argument
1046 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_pm_suspend()
1051 devfreq_suspend_device(gpu->devfreq.devfreq); in a6xx_pm_suspend()
1056 static int a6xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a6xx_get_timestamp() argument
1058 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_timestamp()
1061 /* Force the GPU power on so we can read this register */ in a6xx_get_timestamp()
1064 *value = gpu_read64(gpu, REG_A6XX_RBBM_PERFCTR_CP_0_LO, in a6xx_get_timestamp()
1071 static struct msm_ringbuffer *a6xx_active_ring(struct msm_gpu *gpu) in a6xx_active_ring() argument
1073 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_active_ring()
1079 static void a6xx_destroy(struct msm_gpu *gpu) in a6xx_destroy() argument
1081 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_destroy()
1085 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_destroy()
1090 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->aspace); in a6xx_destroy()
1100 static unsigned long a6xx_gpu_busy(struct msm_gpu *gpu) in a6xx_gpu_busy() argument
1102 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_busy()
1107 /* Only read the gpu busy if the hardware is already active */ in a6xx_gpu_busy()
1115 busy_time = (busy_cycles - gpu->devfreq.busy_cycles) * 10; in a6xx_gpu_busy()
1118 gpu->devfreq.busy_cycles = busy_cycles; in a6xx_gpu_busy()
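The arithmetic at line 1115 converts deltas of the GMU's 19.2 MHz always-on counter into busy microseconds: cycles * 10 / 192 = cycles / 19.2. A sketch of the computation, assuming the XOCLK_0 power counter register pair and gmu_read64():

        /* 64-bit always-on counter in the GMU's CX domain */
        busy_cycles = gmu_read64(&a6xx_gpu->gmu,
                        REG_A6XX_GMU_CX_GMU_POWER_COUNTER_XOCLK_0_L,
                        REG_A6XX_GMU_CX_GMU_POWER_COUNTER_XOCLK_0_H);

        /* Counter runs at 19.2 MHz: cycles * 10 / 192 == microseconds */
        busy_time = (busy_cycles - gpu->devfreq.busy_cycles) * 10;
        do_div(busy_time, 192);

        gpu->devfreq.busy_cycles = busy_cycles;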
1129 a6xx_create_private_address_space(struct msm_gpu *gpu) in a6xx_create_private_address_space() argument
1133 mmu = msm_iommu_pagetable_create(gpu->aspace->mmu); in a6xx_create_private_address_space()
1139 "gpu", 0x100000000ULL, 0x1ffffffffULL); in a6xx_create_private_address_space()
1142 static uint32_t a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a6xx_get_rptr() argument
1144 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_rptr()
1150 return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR); in a6xx_get_rptr()
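The register read at line 1150 is only the fallback path; when a shadow copy is available the GPU never has to be woken just to fetch the read pointer. A sketch, assuming a has_whereami flag set when the shadow buffer was created in hw_init:

static uint32_t a6xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        /* Prefer the shadow slot kept up to date by CP_WHERE_AM_I */
        if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami)
                return a6xx_gpu->shadow[ring->id];

        /* Fall back to reading the register directly */
        return ring->memptrs->rptr = gpu_read(gpu, REG_A6XX_CP_RB_RPTR);
}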
1190 struct msm_gpu *gpu; in a6xx_gpu_init() local
1198 gpu = &adreno_gpu->base; in a6xx_gpu_init()
1230 if (gpu->aspace) in a6xx_gpu_init()
1231 msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, in a6xx_gpu_init()
1234 return gpu; in a6xx_gpu_init()