/linux/drivers/gpu/drm/msm/adreno/
a6xx_gmu.c
     21  static void a6xx_gmu_fault(struct a6xx_gmu *gmu)                 in a6xx_gmu_fault()
     39  struct a6xx_gmu *gmu = data;                                     in a6xx_gmu_irq()  (local)
     63  struct a6xx_gmu *gmu = data;                                     in a6xx_hfi_irq()  (local)
     78  bool a6xx_gmu_sptprac_is_on(struct a6xx_gmu *gmu)                in a6xx_gmu_sptprac_is_on()
     94  bool a6xx_gmu_gx_is_on(struct a6xx_gmu *gmu)                     in a6xx_gmu_gx_is_on()
    115  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;                           in a6xx_gmu_set_freq()  (local)
    209  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;                           in a6xx_gmu_get_freq()  (local)
    214  static bool a6xx_gmu_check_idle_level(struct a6xx_gmu *gmu)      in a6xx_gmu_check_idle_level()
    235  int a6xx_gmu_wait_for_idle(struct a6xx_gmu *gmu)                 in a6xx_gmu_wait_for_idle()
    240  static int a6xx_gmu_start(struct a6xx_gmu *gmu)                  in a6xx_gmu_start()
    [all …]

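The a6xx_gmu.c hits above include a6xx_gmu_check_idle_level() and a6xx_gmu_wait_for_idle(), which suggests the driver polls a GMU status register until the firmware reports the requested idle level. A minimal sketch of that polling pattern follows; the register offset, the meaning of the value read back, and the timeout are placeholders for illustration, not values taken from the file.

#include <linux/io.h>
#include <linux/iopoll.h>

/* Placeholder context: the real struct a6xx_gmu carries much more state. */
struct gmu_idle_sketch {
	void __iomem *mmio;
	u32 idle_level;		/* power state the driver is waiting for */
};

#define GMU_POWER_STATE_SKETCH	0x23fe		/* hypothetical dword offset */
#define GMU_IDLE_TIMEOUT_US	1000000		/* give up after one second */

static int gmu_wait_for_idle_sketch(struct gmu_idle_sketch *gmu)
{
	u32 state;

	/* Poll the (hypothetical) power-state register every 100us until it
	 * matches the requested idle level, or return -ETIMEDOUT. */
	return readl_poll_timeout(gmu->mmio + (GMU_POWER_STATE_SKETCH << 2),
				  state, state == gmu->idle_level,
				  100, GMU_IDLE_TIMEOUT_US);
}
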
a6xx_hfi.c
     29  static int a6xx_hfi_queue_read(struct a6xx_gmu *gmu,             in a6xx_hfi_queue_read()
     67  static int a6xx_hfi_queue_write(struct a6xx_gmu *gmu,            in a6xx_hfi_queue_write()
    103  static int a6xx_hfi_wait_for_msg_interrupt(struct a6xx_gmu *gmu, u32 id, u32 seqnum)  in a6xx_hfi_wait_for_msg_interrupt()
    126  static int a6xx_hfi_wait_for_ack(struct a6xx_gmu *gmu, u32 id, u32 seqnum,  in a6xx_hfi_wait_for_ack()
    189  static int a6xx_hfi_send_msg(struct a6xx_gmu *gmu, int id,       in a6xx_hfi_send_msg()
    212  static int a6xx_hfi_send_gmu_init(struct a6xx_gmu *gmu, int boot_state)  in a6xx_hfi_send_gmu_init()
    224  static int a6xx_hfi_get_fw_version(struct a6xx_gmu *gmu, u32 *version)  in a6xx_hfi_get_fw_version()
    235  static int a6xx_hfi_send_perf_table_v1(struct a6xx_gmu *gmu)     in a6xx_hfi_send_perf_table_v1()
    257  static int a6xx_hfi_send_perf_table(struct a6xx_gmu *gmu)        in a6xx_hfi_send_perf_table()
    280  static void a6xx_generate_bw_table(const struct a6xx_info *info, struct a6xx_gmu *gmu,  in a6xx_generate_bw_table()
    [all …]

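The a6xx_hfi.c hits outline the host-to-GMU message path: a6xx_hfi_send_msg() writes a message into a shared queue via a6xx_hfi_queue_write() and then waits for an acknowledgement through a6xx_hfi_wait_for_ack() / a6xx_hfi_wait_for_msg_interrupt(). The sketch below shows one plausible shape for the ring-write step; the queue layout, header fields, and doorbell register offset are illustrative assumptions, not the definitions used in a6xx_hfi.c.

#include <linux/errno.h>
#include <linux/io.h>
#include <linux/spinlock.h>

/* Illustrative shared-queue layout; field names are assumptions. */
struct hfi_queue_header_sketch {
	u32 read_index;
	u32 write_index;
	u32 size;			/* queue capacity in dwords */
};

struct hfi_queue_sketch {
	struct hfi_queue_header_sketch *header;
	u32 *data;
	spinlock_t lock;
};

#define HOST2GMU_DOORBELL_SKETCH	0x5008	/* hypothetical dword offset */

static int hfi_queue_write_sketch(void __iomem *gmu_mmio,
				  struct hfi_queue_sketch *queue,
				  const u32 *msg, u32 dwords)
{
	struct hfi_queue_header_sketch *hdr = queue->header;
	u32 i, index, used, free;

	spin_lock(&queue->lock);

	index = hdr->write_index;

	/* Work out how much room is left, keeping one slot unused so a full
	 * ring is distinguishable from an empty one. */
	used = (index >= hdr->read_index) ?
		index - hdr->read_index :
		hdr->size - hdr->read_index + index;
	free = hdr->size - used - 1;

	if (dwords > free) {
		spin_unlock(&queue->lock);
		return -ENOSPC;
	}

	/* Copy the message into the circular buffer, wrapping as needed. */
	for (i = 0; i < dwords; i++) {
		queue->data[index] = msg[i];
		index = (index + 1) % hdr->size;
	}

	/* Publish the new write index, then ring the doorbell register so
	 * the GMU firmware knows a message is waiting. */
	hdr->write_index = index;
	spin_unlock(&queue->lock);

	writel(0x01, gmu_mmio + (HOST2GMU_DOORBELL_SKETCH << 2));

	return 0;
}

The id and seqnum parameters visible in the a6xx_hfi_wait_for_ack() signature indicate that each message carries an identifier and sequence number so responses from the GMU can be matched back to the request that triggered them.
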
a6xx_gmu.h
    122  static inline u32 gmu_read(struct a6xx_gmu *gmu, u32 offset)     in gmu_read()
    127  static inline void gmu_write(struct a6xx_gmu *gmu, u32 offset, u32 value)  in gmu_write()
    133  gmu_write_bulk(struct a6xx_gmu *gmu, u32 offset, const u32 *data, u32 size)  in gmu_write_bulk()
    139  static inline void gmu_rmw(struct a6xx_gmu *gmu, u32 reg, u32 mask, u32 or)  in gmu_rmw()
    148  static inline u64 gmu_read64(struct a6xx_gmu *gmu, u32 lo, u32 hi)  in gmu_read64()
    158  #define gmu_poll_timeout(gmu, addr, val, cond, interval, timeout) \  (argument)
    162  static inline u32 gmu_read_rscc(struct a6xx_gmu *gmu, u32 offset)  in gmu_read_rscc()
    167  static inline void gmu_write_rscc(struct a6xx_gmu *gmu, u32 offset, u32 value)  in gmu_write_rscc()
    172  #define gmu_poll_timeout_rscc(gmu, addr, val, cond, interval, timeout) \  (argument)

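a6xx_gmu.h is where the gmu register accessors live: plain read/write wrappers, a read-modify-write helper, 64-bit and RSCC variants, and poll-timeout macros. A sketch consistent with the signatures listed above follows; the dword-to-byte offset shift and the minimal struct are assumptions for illustration, not copied from the header.

#include <linux/io.h>

/* Minimal stand-in for struct a6xx_gmu; the real one has many more fields. */
struct gmu_sketch {
	void __iomem *mmio;
};

/* Assumption: register offsets are given in dwords, hence the "<< 2"
 * conversion to a byte offset. */
static inline u32 gmu_read_sketch(struct gmu_sketch *gmu, u32 offset)
{
	return readl(gmu->mmio + (offset << 2));
}

static inline void gmu_write_sketch(struct gmu_sketch *gmu, u32 offset,
				    u32 value)
{
	writel(value, gmu->mmio + (offset << 2));
}

/* Read-modify-write: clear the bits in @mask, then set the bits in @or,
 * matching the gmu_rmw(gmu, reg, mask, or) shape listed above. */
static inline void gmu_rmw_sketch(struct gmu_sketch *gmu, u32 reg,
				  u32 mask, u32 or)
{
	u32 val = gmu_read_sketch(gmu, reg);

	val &= ~mask;
	gmu_write_sketch(gmu, reg, val | or);
}

gmu_poll_timeout() and gmu_poll_timeout_rscc() presumably layer a readl_poll_timeout()-style loop on top of accessors like these, which is why they appear as macros taking (gmu, addr, val, cond, interval, timeout).
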
a6xx_gpu.c
    515  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in a6xx_set_hwcg()  (local)
   1068  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in hw_init()  (local)
   1446  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in a6xx_recover()  (local)
   2067  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in a6xx_pm_resume()  (local)
   2145  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in a6xx_pm_suspend()  (local)

a6xx_gpu.h
     85  struct a6xx_gmu gmu;    (member)

a6xx_gpu_state.c
   1181  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in _a6xx_get_gmu_registers()  (local)
   1275  struct a6xx_gmu *gmu = &a6xx_gpu->gmu;    in a6xx_snapshot_gmu_hfi_history()  (local)

/linux/arch/arm64/boot/dts/qcom/
sc8180x.dtsi     2316  gmu: gmu@2c6a000 {    (label)
sm6350.dtsi      1506  gmu: gmu@3d6a000 {    (label)
sar2130p.dtsi    1772  gmu: gmu@3d6a000 {    (label)
sm8350.dtsi      2114  gmu: gmu@3d6a000 {    (label)
sc8280xp.dtsi    3054  gmu: gmu@3d6a000 {    (label)
sm8150.dtsi      2303  gmu: gmu@2c6a000 {    (label)
sm8550.dtsi      2517  gmu: gmu@3d6a000 {    (label)
sc7180.dtsi      2266  gmu: gmu@506a000 {    (label)
sdm845.dtsi      4966  gmu: gmu@506a000 {    (label)
x1e80100.dtsi    3873  gmu: gmu@3d6a000 {    (label)
sm8250.dtsi      2998  gmu: gmu@3d6a000 {    (label)
sc7280.dtsi      2947  gmu: gmu@3d6a000 {    (label)