Lines Matching full:gpu

112 static int a6xx_crashdumper_init(struct msm_gpu *gpu,  in a6xx_crashdumper_init()  argument
115 dumper->ptr = msm_gem_kernel_new_locked(gpu->dev, in a6xx_crashdumper_init()
116 SZ_1M, MSM_BO_UNCACHED, gpu->aspace, in a6xx_crashdumper_init()
125 static int a6xx_crashdumper_run(struct msm_gpu *gpu, in a6xx_crashdumper_run() argument
128 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_crashdumper_run()
142 gpu_write64(gpu, REG_A6XX_CP_CRASH_SCRIPT_BASE_LO, in a6xx_crashdumper_run()
145 gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 1); in a6xx_crashdumper_run()
147 ret = gpu_poll_timeout(gpu, REG_A6XX_CP_CRASH_DUMP_STATUS, val, in a6xx_crashdumper_run()
150 gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 0); in a6xx_crashdumper_run()
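
Taken together, the two crashdumper helpers above (this looks like the msm a6xx GPU crash-state code, a6xx_gpu_state.c) boil down to: allocate a 1 MB scratch BO in the GPU address space, point the CP at it, start the dump, poll for completion, and switch the dumper back off. A minimal sketch of that sequence; the bo/iova out-parameters, the completion bit (0x02) and the poll interval/timeout values are filled in from context rather than from the matches above:

    static int a6xx_crashdumper_init(struct msm_gpu *gpu,
            struct a6xx_crashdumper *dumper)
    {
        /* One buffer holds both the dump script and its output */
        dumper->ptr = msm_gem_kernel_new_locked(gpu->dev,
            SZ_1M, MSM_BO_UNCACHED, gpu->aspace,
            &dumper->bo, &dumper->iova);

        return PTR_ERR_OR_ZERO(dumper->ptr);
    }

    static int a6xx_crashdumper_run(struct msm_gpu *gpu,
            struct a6xx_crashdumper *dumper)
    {
        u32 val;
        int ret;

        /* Tell the CP where the crash script lives ... */
        gpu_write64(gpu, REG_A6XX_CP_CRASH_SCRIPT_BASE_LO,
            REG_A6XX_CP_CRASH_SCRIPT_BASE_HI, dumper->iova);

        /* ... and start the dumper */
        gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 1);

        /* Wait for the completion bit (assumed to be BIT(1)) to latch */
        ret = gpu_poll_timeout(gpu, REG_A6XX_CP_CRASH_DUMP_STATUS, val,
            val & 0x02, 100, 10000);

        /* Always disengage the dumper again */
        gpu_write(gpu, REG_A6XX_CP_CRASH_DUMP_CNTL, 0);

        return ret;
    }
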
156 static int debugbus_read(struct msm_gpu *gpu, u32 block, u32 offset, in debugbus_read() argument
162 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_A, reg); in debugbus_read()
163 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_B, reg); in debugbus_read()
164 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_C, reg); in debugbus_read()
165 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_D, reg); in debugbus_read()
170 data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2); in debugbus_read()
171 data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1); in debugbus_read()
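
debugbus_read() programs the same block/index selector into all four DBGC select registers (A through D) and then samples the two trace-buffer words. A sketch of that select-then-sample idiom; the field-packing macros used for 'reg' and the settle delay are assumptions, not visible in the matches:

    static int debugbus_read(struct msm_gpu *gpu, u32 block, u32 offset,
            u32 *data)
    {
        /* Assumed encoding: block select and index packed into one word */
        u32 reg = A6XX_DBGC_CFG_DBGBUS_SEL_D_PING_BLK_SEL(block) |
            A6XX_DBGC_CFG_DBGBUS_SEL_D_PING_INDEX(offset);

        gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_A, reg);
        gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_B, reg);
        gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_C, reg);
        gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_SEL_D, reg);

        /* Give the bus a moment to settle before sampling */
        udelay(1);

        data[0] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF2);
        data[1] = gpu_read(gpu, REG_A6XX_DBGC_CFG_DBGBUS_TRACE_BUF1);

        /* Two dwords captured per selection */
        return 2;
    }
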
204 static int vbif_debugbus_read(struct msm_gpu *gpu, u32 ctrl0, u32 ctrl1, in vbif_debugbus_read() argument
209 gpu_write(gpu, ctrl0, reg); in vbif_debugbus_read()
212 gpu_write(gpu, ctrl1, i); in vbif_debugbus_read()
213 data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT); in vbif_debugbus_read()
228 static void a6xx_get_vbif_debugbus_block(struct msm_gpu *gpu, in a6xx_get_vbif_debugbus_block() argument
243 clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON); in a6xx_get_vbif_debugbus_block()
246 gpu_write(gpu, REG_A6XX_VBIF_CLKON, in a6xx_get_vbif_debugbus_block()
250 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS1_CTRL0, 0); in a6xx_get_vbif_debugbus_block()
253 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS_OUT_CTRL, 1); in a6xx_get_vbif_debugbus_block()
258 ptr += vbif_debugbus_read(gpu, in a6xx_get_vbif_debugbus_block()
264 ptr += vbif_debugbus_read(gpu, in a6xx_get_vbif_debugbus_block()
270 gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS2_CTRL0, 0); in a6xx_get_vbif_debugbus_block()
273 ptr += vbif_debugbus_read(gpu, in a6xx_get_vbif_debugbus_block()
279 gpu_write(gpu, REG_A6XX_VBIF_CLKON, clk); in a6xx_get_vbif_debugbus_block()
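
The VBIF blocks have their own test bus rather than the DBGC path. vbif_debugbus_read() selects one source via ctrl0 and then steps ctrl1 through the indices while reading TEST_BUS_OUT; a6xx_get_vbif_debugbus_block() wraps that in a save/force-on/restore of the VBIF clock gating. Sketched below; the force-on bit name and the block_sel/block_len selection values are placeholders:

    /* vbif_debugbus_read(): select a source, clock out 'count' words */
    gpu_write(gpu, ctrl0, reg);
    for (i = 0; i < count; i++) {
        gpu_write(gpu, ctrl1, i);
        data[i] = gpu_read(gpu, REG_A6XX_VBIF_TEST_BUS_OUT);
    }

    /* a6xx_get_vbif_debugbus_block(): save the clock-gating state and
     * force the test bus clock on while sampling */
    clk = gpu_read(gpu, REG_A6XX_VBIF_CLKON);
    gpu_write(gpu, REG_A6XX_VBIF_CLKON,
        clk | A6XX_VBIF_CLKON_FORCE_ON_TESTBUS);      /* bit name assumed */

    gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS1_CTRL0, 0); /* BUS2 is read first */
    gpu_write(gpu, REG_A6XX_VBIF_TEST_BUS_OUT_CTRL, 1);

    ptr += vbif_debugbus_read(gpu, REG_A6XX_VBIF_TEST_BUS2_CTRL0,
        REG_A6XX_VBIF_TEST_BUS2_CTRL1, block_sel, block_len, ptr);
    /* ... remaining BUS2/BUS1 selections elided ... */

    /* Put the clock gating back the way it was found */
    gpu_write(gpu, REG_A6XX_VBIF_CLKON, clk);
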
282 static void a6xx_get_debugbus_block(struct msm_gpu *gpu, in a6xx_get_debugbus_block() argument
297 ptr += debugbus_read(gpu, block->id, i, ptr); in a6xx_get_debugbus_block()
318 static void a6xx_get_debugbus(struct msm_gpu *gpu, in a6xx_get_debugbus() argument
327 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_CNTLT, in a6xx_get_debugbus()
330 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_CNTLM, in a6xx_get_debugbus()
333 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_0, 0); in a6xx_get_debugbus()
334 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_1, 0); in a6xx_get_debugbus()
335 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_2, 0); in a6xx_get_debugbus()
336 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_IVTL_3, 0); in a6xx_get_debugbus()
338 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_0, 0x76543210); in a6xx_get_debugbus()
339 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_1, 0xFEDCBA98); in a6xx_get_debugbus()
341 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_0, 0); in a6xx_get_debugbus()
342 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_1, 0); in a6xx_get_debugbus()
343 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_2, 0); in a6xx_get_debugbus()
344 gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_MASKL_3, 0); in a6xx_get_debugbus()
349 res = platform_get_resource_byname(gpu->pdev, IORESOURCE_MEM, in a6xx_get_debugbus()
379 (a6xx_has_gbif(to_adreno_gpu(gpu)) ? 1 : 0); in a6xx_get_debugbus()
388 a6xx_get_debugbus_block(gpu, in a6xx_get_debugbus()
396 * GBIF has the same debugbus as the other GPU blocks, so fall back to in a6xx_get_debugbus()
397 * the default path if the GPU uses GBIF; GBIF also uses exactly the same in a6xx_get_debugbus()
400 if (a6xx_has_gbif(to_adreno_gpu(gpu))) { in a6xx_get_debugbus()
401 a6xx_get_debugbus_block(gpu, a6xx_state, in a6xx_get_debugbus()
410 if (!a6xx_has_gbif(to_adreno_gpu(gpu))) { in a6xx_get_debugbus()
416 a6xx_get_vbif_debugbus_block(gpu, a6xx_state, in a6xx_get_debugbus()
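
a6xx_get_debugbus() first programs the DBGC sampler (CNTLT/CNTLM, cleared IVTL selects and masks, an identity byte-lane map), maps the separate CX debug-bus MMIO region, walks the per-block list, and only then decides between the GBIF and VBIF paths. A condensed sketch of that tail end; the GBIF block table and the destination object names are assumptions:

    /* Identity byte-lane mapping for the 16 sampled debugbus lanes */
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_0, 0x76543210);
    gpu_write(gpu, REG_A6XX_DBGC_CFG_DBGBUS_BYTEL_1, 0xFEDCBA98);

    /* ... IVTL/MASKL clears, CX dbgc mapping via
     * platform_get_resource_byname() and the per-block
     * a6xx_get_debugbus_block() loop elided ... */

    if (a6xx_has_gbif(to_adreno_gpu(gpu)))
        /* GBIF state is exposed through the regular debugbus (it reuses
         * the VBIF block ID), so the default block path covers it */
        a6xx_get_debugbus_block(gpu, a6xx_state,
            &a6xx_gbif_debugbus_block, obj);    /* table/obj names assumed */
    else
        /* Pre-GBIF parts are sampled through the dedicated VBIF test bus */
        a6xx_get_vbif_debugbus_block(gpu, a6xx_state, obj);
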
446 static void a6xx_get_dbgahb_cluster(struct msm_gpu *gpu, in a6xx_get_dbgahb_cluster() argument
484 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_dbgahb_cluster()
492 static void a6xx_get_dbgahb_clusters(struct msm_gpu *gpu, in a6xx_get_dbgahb_clusters() argument
508 a6xx_get_dbgahb_cluster(gpu, a6xx_state, in a6xx_get_dbgahb_clusters()
514 static void a6xx_get_cluster(struct msm_gpu *gpu, in a6xx_get_cluster() argument
555 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_cluster()
563 static void a6xx_get_clusters(struct msm_gpu *gpu, in a6xx_get_clusters() argument
578 a6xx_get_cluster(gpu, a6xx_state, &a6xx_clusters[i], in a6xx_get_clusters()
583 static void a6xx_get_shader_block(struct msm_gpu *gpu, in a6xx_get_shader_block() argument
606 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_shader_block()
614 static void a6xx_get_shaders(struct msm_gpu *gpu, in a6xx_get_shaders() argument
629 a6xx_get_shader_block(gpu, a6xx_state, &a6xx_shader_blocks[i], in a6xx_get_shaders()
634 static void a6xx_get_crashdumper_hlsq_registers(struct msm_gpu *gpu, in a6xx_get_crashdumper_hlsq_registers() argument
663 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_crashdumper_hlsq_registers()
672 static void a6xx_get_crashdumper_registers(struct msm_gpu *gpu, in a6xx_get_crashdumper_registers() argument
701 if (a6xx_crashdumper_run(gpu, dumper)) in a6xx_get_crashdumper_registers()
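
The cluster, shader-block, DBGAHB and crashdumper register captures above all share one shape: build a small read script at the front of the dumper BO, aim its output further into the same BO, run a6xx_crashdumper_run(), and copy the result into the state object. A schematic version, assuming the CRASHDUMP_READ()/CRASHDUMP_FINI() script helpers from a6xx_gpu_state.h and using SCRIPT_SIZE and state_kmemdup() as stand-ins for the real offset and copy helper:

    u64 *in = dumper->ptr;                 /* script at the start of the BO */
    u64 out = dumper->iova + SCRIPT_SIZE;  /* output area further in;
                                            * SCRIPT_SIZE is a placeholder */

    /* One entry per range: "read 'count' dwords starting at 'reg' to out" */
    in += CRASHDUMP_READ(in, reg, count, out);   /* helper assumed from
                                                  * a6xx_gpu_state.h */
    CRASHDUMP_FINI(in);                          /* terminate the script */

    /* Let the CP execute the script, then pull the results out of the BO */
    if (a6xx_crashdumper_run(gpu, dumper))
        return;

    obj->data = state_kmemdup(a6xx_state, dumper->ptr + SCRIPT_SIZE,
        count * sizeof(u32));                    /* helper name assumed */
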
710 static void a6xx_get_ahb_gpu_registers(struct msm_gpu *gpu, in a6xx_get_ahb_gpu_registers() argument
730 obj->data[index++] = gpu_read(gpu, in a6xx_get_ahb_gpu_registers()
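
a6xx_get_ahb_gpu_registers(), by contrast, needs no crashdumper: it reads every register in each range directly over AHB. The inner loop is essentially the following, assuming registers[] stores (first, last) pairs:

    /* regs->registers[] holds (first, last) pairs; read each range word
     * by word straight over AHB into a flat buffer */
    for (i = 0; i < regs->count; i += 2) {
        u32 count = regs->registers[i + 1] - regs->registers[i] + 1;
        int j;

        for (j = 0; j < count; j++)
            obj->data[index++] = gpu_read(gpu, regs->registers[i] + j);
    }
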
736 static void _a6xx_get_gmu_registers(struct msm_gpu *gpu, in _a6xx_get_gmu_registers() argument
742 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in _a6xx_get_gmu_registers()
773 static void a6xx_get_gmu_registers(struct msm_gpu *gpu, in a6xx_get_gmu_registers() argument
776 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_gmu_registers()
788 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[0], in a6xx_get_gmu_registers()
790 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[1], in a6xx_get_gmu_registers()
797 gpu_write(gpu, REG_A6XX_GMU_AO_AHB_FENCE_CTRL, 0); in a6xx_get_gmu_registers()
799 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[2], in a6xx_get_gmu_registers()
804 static void a6xx_get_registers(struct msm_gpu *gpu, in a6xx_get_registers() argument
812 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_get_registers()
823 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
828 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
832 a6xx_get_ahb_gpu_registers(gpu, in a6xx_get_registers()
837 a6xx_get_crashdumper_registers(gpu, in a6xx_get_registers()
843 a6xx_get_crashdumper_hlsq_registers(gpu, in a6xx_get_registers()
850 static void a6xx_get_indexed_regs(struct msm_gpu *gpu, in a6xx_get_indexed_regs() argument
863 gpu_write(gpu, indexed->addr, 0); in a6xx_get_indexed_regs()
867 obj->data[i] = gpu_read(gpu, indexed->data); in a6xx_get_indexed_regs()
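
a6xx_get_indexed_regs() uses the usual indexed-register idiom: reset the address register, then read the data register repeatedly while the hardware auto-increments the internal address. Roughly, with indexed->count assumed to hold the bank length:

    /* Indexed banks start at address 0 ... */
    gpu_write(gpu, indexed->addr, 0);

    /* ... and every read of the data register advances the internal
     * address by one, so a simple loop walks the whole bank */
    for (i = 0; i < indexed->count; i++)
        obj->data[i] = gpu_read(gpu, indexed->data);
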
870 static void a6xx_get_indexed_registers(struct msm_gpu *gpu, in a6xx_get_indexed_registers() argument
883 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_indexed_reglist[i], in a6xx_get_indexed_registers()
887 mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE); in a6xx_get_indexed_registers()
888 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0); in a6xx_get_indexed_registers()
891 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, in a6xx_get_indexed_registers()
901 gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size); in a6xx_get_indexed_registers()
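
The CP mempool dump wraps that same read in a stabilize/restore pair so the pool contents do not shift mid-dump; the destination slot below is a placeholder:

    /* Zero the pool size so the mempool contents hold still while read */
    mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE);
    gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0);

    a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed,
        obj);                                /* destination slot assumed */

    /* Put back the size the firmware had configured */
    gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size);
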
906 struct msm_gpu_state *a6xx_gpu_state_get(struct msm_gpu *gpu) in a6xx_gpu_state_get() argument
909 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a6xx_gpu_state_get()
920 adreno_gpu_state_get(gpu, &a6xx_state->base); in a6xx_gpu_state_get()
922 a6xx_get_gmu_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
929 a6xx_get_indexed_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
932 if (!a6xx_crashdumper_init(gpu, &dumper)) { in a6xx_gpu_state_get()
933 a6xx_get_registers(gpu, a6xx_state, &dumper); in a6xx_gpu_state_get()
934 a6xx_get_shaders(gpu, a6xx_state, &dumper); in a6xx_gpu_state_get()
935 a6xx_get_clusters(gpu, a6xx_state, &dumper); in a6xx_gpu_state_get()
936 a6xx_get_dbgahb_clusters(gpu, a6xx_state, &dumper); in a6xx_gpu_state_get()
938 msm_gem_kernel_put(dumper.bo, gpu->aspace, true); in a6xx_gpu_state_get()
942 a6xx_get_debugbus(gpu, a6xx_state); in a6xx_gpu_state_get()
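
Putting it together, a6xx_gpu_state_get() orders the capture from least to most intrusive: common adreno state and GMU registers first, then the indexed registers, then, only if a crashdumper BO could be allocated, the script-driven register/shader/cluster/DBGAHB dumps, and the debug bus last. A compressed sketch of that flow; the GX-power check and error handling are elided:

    struct a6xx_crashdumper dumper = { 0 };

    adreno_gpu_state_get(gpu, &a6xx_state->base);   /* common adreno state */
    a6xx_get_gmu_registers(gpu, a6xx_state);        /* always readable */

    /* The rest needs the GX power domain up (check elided here) */
    a6xx_get_indexed_registers(gpu, a6xx_state);

    if (!a6xx_crashdumper_init(gpu, &dumper)) {
        a6xx_get_registers(gpu, a6xx_state, &dumper);
        a6xx_get_shaders(gpu, a6xx_state, &dumper);
        a6xx_get_clusters(gpu, a6xx_state, &dumper);
        a6xx_get_dbgahb_clusters(gpu, a6xx_state, &dumper);

        /* The scratch BO is only needed while dumping */
        msm_gem_kernel_put(dumper.bo, gpu->aspace, true);
    }

    a6xx_get_debugbus(gpu, a6xx_state);

    return &a6xx_state->base;
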
1156 void a6xx_show(struct msm_gpu *gpu, struct msm_gpu_state *state, in a6xx_show() argument
1166 adreno_show(gpu, state, p); in a6xx_show()
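
The show side mirrors the capture: a6xx_show() recovers the wrapper state, hands the common part to adreno_show(), and then prints the a6xx-specific sections through the same drm_printer. In outline (the container_of() recovery via the 'base' member follows from line 920 above; the section printing is summarized, not reproduced):

    void a6xx_show(struct msm_gpu *gpu, struct msm_gpu_state *state,
            struct drm_printer *p)
    {
        struct a6xx_gpu_state *a6xx_state = container_of(state,
            struct a6xx_gpu_state, base);

        if (IS_ERR_OR_NULL(state))
            return;

        /* Common adreno/msm fields (ring state, BOs, etc.) */
        adreno_show(gpu, state, p);

        /* ... a6xx-specific sections: registers, shader blocks, clusters,
         * indexed registers and the debug bus are printed here ... */
    }
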