Lines matching full:gpu in drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c (Linux kernel, drm/msm Adreno driver). Each match below shows its source line number, the matched fragment, and the enclosing function; "argument" marks a match inside a parameter list.

120 static int a6xx_crashdumper_init(struct msm_gpu *gpu,  in a6xx_crashdumper_init()  argument
123 dumper->ptr = msm_gem_kernel_new(gpu->dev, in a6xx_crashdumper_init()
124 SZ_1M, MSM_BO_WC, gpu->aspace, in a6xx_crashdumper_init()
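The two matches above show only the allocation call, split across lines. A minimal sketch of what the full init helper plausibly looks like, assuming the usual msm_gem_kernel_new() out-parameters (&dumper->bo, &dumper->iova); the "crashdump" name string and the error handling are assumptions, not shown in the matches:

    static int a6xx_crashdumper_init(struct msm_gpu *gpu,
            struct a6xx_crashdumper *dumper)
    {
        /* 1 MiB of write-combined, kernel-mapped GPU memory holds both
         * the crashdump script and the register data it writes back */
        dumper->ptr = msm_gem_kernel_new(gpu->dev,
            SZ_1M, MSM_BO_WC, gpu->aspace,
            &dumper->bo, &dumper->iova);

        if (!IS_ERR(dumper->ptr))
            msm_gem_object_set_name(dumper->bo, "crashdump");

        return PTR_ERR_OR_ZERO(dumper->ptr);
    }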
133 static int a6xx_crashdumper_run(struct msm_gpu *gpu, in a6xx_crashdumper_run() argument
136 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_crashdumper_run()
150 gpu_write64(gpu, REG_A6XX_CP_CRASH_SCRIPT_BASE, dumper->iova); in a6xx_crashdumper_run()
152 gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 1); in a6xx_crashdumper_run()
154 ret = gpu_poll_timeout(gpu, REG_A6XX_CP_CRASH_DUMP_STATUS, val, in a6xx_crashdumper_run()
157 gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 0); in a6xx_crashdumper_run()
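Assembled in order, the matches above give the whole run sequence: point CP_CRASH_SCRIPT_BASE at the script buffer, set the kick bit, poll for completion, clear the kick bit. A hedged sketch; the completion condition (bit 1) and the poll interval/timeout values are assumptions, since the matched line stops at the poll call:

    u32 val;
    int ret;

    gpu_write64(gpu, REG_A6XX_CP_CRASH_SCRIPT_BASE, dumper->iova);
    gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 1);

    /* assumed: bit 1 of CRASH_DUMP_STATUS flags completion */
    ret = gpu_poll_timeout(gpu, REG_A6XX_CP_CRASH_DUMP_STATUS, val,
            val & 0x02, 100, 10000);

    gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 0);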
163 static int debugbus_read(struct msm_gpu *gpu, u32 block, u32 offset, in debugbus_read() argument
169 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_A, reg); in debugbus_read()
170 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_B, reg); in debugbus_read()
171 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_C, reg); in debugbus_read()
172 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_D, reg); in debugbus_read()
177 data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2); in debugbus_read()
178 data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1); in debugbus_read()
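debugbus_read() programs one select value into all four DBGC select registers, then samples two trace-buffer words. How that select value is encoded is not in the matches; a sketch assuming the usual index-plus-block-select field macros from the generated a6xx register headers, plus a short settle delay before sampling:

    /* assumed field helpers: sample index plus block select */
    u32 reg = A6XX_DBGC_CFG_DBGBUS_SEL_D_PING_INDEX(offset) |
            A6XX_DBGC_CFG_DBGBUS_SEL_D_PING_BLK_SEL(block);

    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_A, reg);
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_B, reg);
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_C, reg);
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_D, reg);

    /* give the selected signal time to reach the trace buffer */
    udelay(1);

    data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2);
    data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1);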
211 static int vbif_debugbus_read(struct msm_gpu *gpu, u32 ctrl0, u32 ctrl1, in vbif_debugbus_read() argument
216 gpu_write(gpu, ctrl0, reg); in vbif_debugbus_read()
219 gpu_write(gpu, ctrl1, i); in vbif_debugbus_read()
220 data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT); in vbif_debugbus_read()
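vbif_debugbus_read() can be reconstructed almost entirely from its matches: one select write through ctrl0, then a counted walk of the output register, stepping the secondary index through ctrl1. The count parameter and the return value are assumptions; everything else is shown above:

    static int vbif_debugbus_read(struct msm_gpu *gpu, u32 ctrl0, u32 ctrl1,
            u32 reg, int count, u32 *data)
    {
        int i;

        /* select which section of the VBIF test bus to sample */
        gpu_write(gpu, ctrl0, reg);

        /* step the secondary index, pulling one word per step */
        for (i = 0; i < count; i++) {
            gpu_write(gpu, ctrl1, i);
            data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT);
        }

        return count;
    }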
235 static void a6xx_get_vbif_debugbus_block(struct msm_gpu *gpu, in a6xx_get_vbif_debugbus_block() argument
250 clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON); in a6xx_get_vbif_debugbus_block()
253 gpu_write(gpu, REG_A6XX_VBIF_CLKON, in a6xx_get_vbif_debugbus_block()
257 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS1_CTRL0, 0); in a6xx_get_vbif_debugbus_block()
260 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS_OUT_CTRL, 1); in a6xx_get_vbif_debugbus_block()
265 ptr += vbif_debugbus_read(gpu, in a6xx_get_vbif_debugbus_block()
271 ptr += vbif_debugbus_read(gpu, in a6xx_get_vbif_debugbus_block()
277 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS2_CTRL0, 0); in a6xx_get_vbif_debugbus_block()
280 ptr += vbif_debugbus_read(gpu, in a6xx_get_vbif_debugbus_block()
286 gpu_write(gpu, REG_A6XX_VBIF_CLKON, clk); in a6xx_get_vbif_debugbus_block()
289 static void a6xx_get_debugbus_block(struct msm_gpu *gpu, in a6xx_get_debugbus_block() argument
304 ptr += debugbus_read(gpu, block->id, i, ptr); in a6xx_get_debugbus_block()
325 static void a6xx_get_debugbus(struct msm_gpu *gpu, in a6xx_get_debugbus() argument
334 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_CNTLT, in a6xx_get_debugbus()
337 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_CNTLM, in a6xx_get_debugbus()
340 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_0, 0); in a6xx_get_debugbus()
341 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_1, 0); in a6xx_get_debugbus()
342 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_2, 0); in a6xx_get_debugbus()
343 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_3, 0); in a6xx_get_debugbus()
345 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_0, 0x76543210); in a6xx_get_debugbus()
346 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_1, 0xFEDCBA98); in a6xx_get_debugbus()
348 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_0, 0); in a6xx_get_debugbus()
349 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_1, 0); in a6xx_get_debugbus()
350 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_2, 0); in a6xx_get_debugbus()
351 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_3, 0); in a6xx_get_debugbus()
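The two BYTEL values at lines 345-346 look like magic but read as an identity mapping: each 4-bit field selects which of the 16 debugbus byte lanes feeds that position, so lanes 0-15 pass straight through in order, while the IVTL and MASKL writes around them disable lane inversion and masking. The lane interpretation is an assumption drawn from the nibble pattern:

    /* nibble n selects byte lane n: a straight-through mapping */
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_0, 0x76543210); /* lanes 0-7  */
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_1, 0xFEDCBA98); /* lanes 8-15 */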
356 res = platform_get_resource_byname(gpu->pdev, IORESOURCE_MEM, in a6xx_get_debugbus()
386 (a6xx_has_gbif(to_adreno_gpu(gpu)) ? 1 : 0); in a6xx_get_debugbus()
388 if (adreno_is_a650_family(to_adreno_gpu(gpu))) in a6xx_get_debugbus()
398 a6xx_get_debugbus_block(gpu, in a6xx_get_debugbus()
406 * GBIF has the same debugbus as the other GPU blocks; fall back to the in a6xx_get_debugbus()
407 * default path if the GPU uses GBIF. GBIF also uses exactly the same in a6xx_get_debugbus()
410 if (a6xx_has_gbif(to_adreno_gpu(gpu))) { in a6xx_get_debugbus()
411 a6xx_get_debugbus_block(gpu, a6xx_state, in a6xx_get_debugbus()
419 if (adreno_is_a650_family(to_adreno_gpu(gpu))) { in a6xx_get_debugbus()
421 a6xx_get_debugbus_block(gpu, in a6xx_get_debugbus()
429 if (!a6xx_has_gbif(to_adreno_gpu(gpu))) { in a6xx_get_debugbus()
435 a6xx_get_vbif_debugbus_block(gpu, a6xx_state, in a6xx_get_debugbus()
465 static void a6xx_get_dbgahb_cluster(struct msm_gpu *gpu, in a6xx_get_dbgahb_cluster() argument
503 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_dbgahb_cluster()
511 static void a6xx_get_dbgahb_clusters(struct msm_gpu *gpu, in a6xx_get_dbgahb_clusters() argument
527 a6xx_get_dbgahb_cluster(gpu, a6xx_state, in a6xx_get_dbgahb_clusters()
533 static void a6xx_get_cluster(struct msm_gpu *gpu, in a6xx_get_cluster() argument
539 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_cluster()
585 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_cluster()
593 static void a6xx_get_clusters(struct msm_gpu *gpu, in a6xx_get_clusters() argument
608 a6xx_get_cluster(gpu, a6xx_state, &a6xx_clusters[i], in a6xx_get_clusters()
613 static void a6xx_get_shader_block(struct msm_gpu *gpu, in a6xx_get_shader_block() argument
636 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_shader_block()
644 static void a6xx_get_shaders(struct msm_gpu *gpu, in a6xx_get_shaders() argument
659 a6xx_get_shader_block(gpu, a6xx_state, &a6xx_shader_blocks[i], in a6xx_get_shaders()
664 static void a6xx_get_crashdumper_hlsq_registers(struct msm_gpu *gpu, in a6xx_get_crashdumper_hlsq_registers() argument
693 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_crashdumper_hlsq_registers()
702 static void a6xx_get_crashdumper_registers(struct msm_gpu *gpu, in a6xx_get_crashdumper_registers() argument
714 if (!adreno_is_a660_family(to_adreno_gpu(gpu)) && in a6xx_get_crashdumper_registers()
736 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_crashdumper_registers()
745 static void a6xx_get_ahb_gpu_registers(struct msm_gpu *gpu, in a6xx_get_ahb_gpu_registers() argument
753 if (!adreno_is_a660_family(to_adreno_gpu(gpu)) && in a6xx_get_ahb_gpu_registers()
770 obj->data[index++] = gpu_read(gpu, in a6xx_get_ahb_gpu_registers()
776 static void _a6xx_get_gmu_registers(struct msm_gpu *gpu, in _a6xx_get_gmu_registers() argument
782 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in _a6xx_get_gmu_registers()
813 static void a6xx_get_gmu_registers(struct msm_gpu *gpu, in a6xx_get_gmu_registers() argument
816 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_gmu_registers()
828 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[0], in a6xx_get_gmu_registers()
830 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[1], in a6xx_get_gmu_registers()
837 gpu_write(gpu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0); in a6xx_get_gmu_registers()
839 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[2], in a6xx_get_gmu_registers()
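The FENCE_CTRL write at line 837 sits between the first two GMU reglists and the third, which suggests the third list covers registers behind the AO AHB fence. That reading is an assumption; the matches show only the write itself:

    /* assumed semantics: 0 == allow, i.e. drop the AO AHB fence so
     * reads of the fenced GMU/GX register range reach the hardware
     * instead of being blocked */
    gpu_write(gpu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0);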
866 static void a6xx_snapshot_gmu_hfi_history(struct msm_gpu *gpu, in a6xx_snapshot_gmu_hfi_history() argument
869 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_snapshot_gmu_hfi_history()
887 static void a6xx_get_registers(struct msm_gpu *gpu, in a6xx_get_registers() argument
895 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_registers()
906 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
910 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
915 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
919 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
923 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
929 * because the GPU has no memory access until we resume in a6xx_get_registers()
931 * we have captured as much useful GPU state as possible). in a6xx_get_registers()
935 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
942 a6xx_get_crashdumper_registers(gpu, in a6xx_get_registers()
948 a6xx_get_crashdumper_hlsq_registers(gpu, in a6xx_get_registers()
954 static u32 a6xx_get_cp_roq_size(struct msm_gpu *gpu) in a6xx_get_cp_roq_size() argument
957 return gpu_read(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2) >> 14; in a6xx_get_cp_roq_size()
960 static u32 a7xx_get_cp_roq_size(struct msm_gpu *gpu) in a7xx_get_cp_roq_size() argument
967 gpu_write(gpu, REG_A6XX_CP_SQE_UCODE_DBG_ADDR, 0x70d3); in a7xx_get_cp_roq_size()
969 return 4 * (gpu_read(gpu, REG_A6XX_CP_SQE_UCODE_DBG_DATA) >> 20); in a7xx_get_cp_roq_size()
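The two ROQ-size helpers fetch the same quantity from different places. On a6xx it sits directly in the upper bits of CP_ROQ_THRESHOLDS_2; on a7xx it is read through the SQE ucode debug window (the 0x70d3 address comes from the match above) and scaled to dwords. Side by side, with the bit ranges inferred from the shifts shown:

    /* a6xx: size is THRESHOLDS_2[31:14] */
    u32 a6xx_size = gpu_read(gpu, REG_A6XX_CP_ROQ_THRESHOLDS_2) >> 14;

    /* a7xx: read one word of SQE firmware scratch, take its top
     * 12 bits, and multiply by 4 to get the size in dwords */
    gpu_write(gpu, REG_A6XX_CP_SQE_UCODE_DBG_ADDR, 0x70d3);
    u32 a7xx_size = 4 * (gpu_read(gpu, REG_A6XX_CP_SQE_UCODE_DBG_DATA) >> 20);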
973 static void a6xx_get_indexed_regs(struct msm_gpu *gpu, in a6xx_get_indexed_regs() argument
982 indexed->count = indexed->count_fn(gpu); in a6xx_get_indexed_regs()
989 gpu_write(gpu, indexed->addr, 0); in a6xx_get_indexed_regs()
993 obj->data[i] = gpu_read(gpu, indexed->data); in a6xx_get_indexed_regs()
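The pattern visible at lines 989-993 is a classic address/data window: rewind the address register to 0, then read the data register repeatedly while the hardware advances the internal index. The auto-increment behavior is inferred from the loop shape, not shown explicitly:

    /* rewind the window to index 0 ... */
    gpu_write(gpu, indexed->addr, 0);

    /* ... then stream out 'count' words; each read bumps the index */
    for (i = 0; i < indexed->count; i++)
        obj->data[i] = gpu_read(gpu, indexed->data);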
996 static void a6xx_get_indexed_registers(struct msm_gpu *gpu, in a6xx_get_indexed_registers() argument
1009 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_indexed_reglist[i], in a6xx_get_indexed_registers()
1012 if (adreno_is_a650_family(to_adreno_gpu(gpu))) { in a6xx_get_indexed_registers()
1015 val = gpu_read(gpu, REG_A6XX_CP_CHICKEN_DBG); in a6xx_get_indexed_registers()
1016 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val | 4); in a6xx_get_indexed_registers()
1019 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, in a6xx_get_indexed_registers()
1022 gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val); in a6xx_get_indexed_registers()
1028 mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE); in a6xx_get_indexed_registers()
1029 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0); in a6xx_get_indexed_registers()
1032 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, in a6xx_get_indexed_registers()
1042 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size); in a6xx_get_indexed_registers()
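Lines 1028-1042 form a save/zero/dump/restore bracket: programming CP_MEM_POOL_SIZE to 0 appears to expose the entire mempool through the indexed interface, after which the saved size is written back. A sketch with that reading as comments; the destination argument of the dump call is elided in the matches, so it is left as a hypothetical placeholder here:

    /* save the live size, then zero it so the indexed dump can walk
     * the whole mempool rather than just the configured portion */
    mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE);
    gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0);

    a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed,
            obj /* placeholder: destination object, not shown */);

    /* put the hardware back the way we found it */
    gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size);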
1045 static void a7xx_get_indexed_registers(struct msm_gpu *gpu, in a7xx_get_indexed_registers() argument
1063 a6xx_get_indexed_regs(gpu, a6xx_state, &a7xx_indexed_reglist[i], in a7xx_get_indexed_registers()
1066 gpu_rmw(gpu, REG_A6XX_CP_CHICKEN_DBG, 0, BIT(2)); in a7xx_get_indexed_registers()
1067 gpu_rmw(gpu, REG_A7XX_CP_BV_CHICKEN_DBG, 0, BIT(2)); in a7xx_get_indexed_registers()
1071 a6xx_get_indexed_regs(gpu, a6xx_state, a7xx_cp_bv_mempool_indexed, in a7xx_get_indexed_registers()
1074 gpu_rmw(gpu, REG_A6XX_CP_CHICKEN_DBG, BIT(2), 0); in a7xx_get_indexed_registers()
1075 gpu_rmw(gpu, REG_A7XX_CP_BV_CHICKEN_DBG, BIT(2), 0); in a7xx_get_indexed_registers()
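a7xx gates the mempool dump behind a debug bit instead of resizing. gpu_rmw(gpu, reg, mask, or) is the driver's read-modify-write helper (read, clear the 'mask' bits, OR in 'or'), so the pairs at lines 1066-1075 set BIT(2) in both CHICKEN_DBG registers around the BV mempool dump and clear it afterwards. This mirrors the a650-family path at lines 1015-1022, where val | 4 sets the same bit by hand:

    /* setting the bit via rmw ... */
    gpu_rmw(gpu, REG_A6XX_CP_CHICKEN_DBG, 0, BIT(2));
    /* ... is equivalent to the a650-family open-coded version: */
    val = gpu_read(gpu, REG_A6XX_CP_CHICKEN_DBG);
    gpu_write(gpu, REG_A6XX_CP_CHICKEN_DBG, val | 4); /* 4 == BIT(2) */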
1079 struct msm_gpu_state *a6xx_gpu_state_get(struct msm_gpu *gpu) in a6xx_gpu_state_get() argument
1082 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_state_get()
1086 bool stalled = !!(gpu_read(gpu, REG_A6XX_RBBM_STATUS3) & in a6xx_gpu_state_get()
1095 adreno_gpu_state_get(gpu, &a6xx_state->base); in a6xx_gpu_state_get()
1098 a6xx_get_gmu_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1104 a6xx_snapshot_gmu_hfi_history(gpu, a6xx_state); in a6xx_gpu_state_get()
1113 a7xx_get_indexed_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1118 a6xx_get_indexed_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1123 * write out GPU state, so we need to skip this when the SMMU is in a6xx_gpu_state_get()
1126 if (!stalled && !gpu->needs_hw_init && in a6xx_gpu_state_get()
1127 !a6xx_crashdumper_init(gpu, &_dumper)) { in a6xx_gpu_state_get()
1131 a6xx_get_registers(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1134 a6xx_get_shaders(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1135 a6xx_get_clusters(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1136 a6xx_get_dbgahb_clusters(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1138 msm_gem_kernel_put(dumper->bo, gpu->aspace); in a6xx_gpu_state_get()
1142 a6xx_get_debugbus(gpu, a6xx_state); in a6xx_gpu_state_get()
1144 a6xx_state->gpu_initialized = !gpu->needs_hw_init; in a6xx_gpu_state_get()
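Read in order, the a6xx_gpu_state_get() matches give the whole capture strategy: CPU-readable state first (GMU registers, HFI history, indexed registers), then the crashdumper-based dumps (AHB registers, shaders, clusters, DBGAHB clusters), and the debugbus last. Because the crashdumper itself writes into GPU memory, it is guarded: skipped when the SMMU is stalled on an iova fault or when the GPU would first need re-init. A sketch of that guard; the status bit name is an assumption, since the match at line 1086 truncates the mask:

    /* assumed bit name: the RBBM status flag for an SMMU stall */
    bool stalled = !!(gpu_read(gpu, REG_A6XX_RBBM_STATUS3) &
            A6XX_RBBM_STATUS3_SMMU_STALLED_ON_FAULT);

    if (!stalled && !gpu->needs_hw_init &&
        !a6xx_crashdumper_init(gpu, &_dumper)) {
        dumper = &_dumper;    /* crashdumper path is safe to use */
    }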
1369 void a6xx_show(struct msm_gpu *gpu, struct msm_gpu_state *state, in a6xx_show() argument
1379 drm_printf(p, "gpu-initialized: %d\n", a6xx_state->gpu_initialized); in a6xx_show()
1381 adreno_show(gpu, state, p); in a6xx_show()