// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/delay.h>
#include <linux/string_choices.h>
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_hw_pingpong.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"
#include "disp/msm_disp_snapshot.h"

#include <drm/drm_managed.h>

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

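/* number of consecutive pingpong-done timeouts after which the panel is reported dead */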
#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels to
 * override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc);

static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc->split_role != ENC_ROLE_SLAVE);
}

static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };
	struct dpu_hw_intf_cmd_mode_cfg cmd_mode_cfg = {};

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) &&
	    phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				phys_enc->hw_pp->idx);

	if (intf_cfg.dsc != 0)
		cmd_mode_cfg.data_compress = true;

	cmd_mode_cfg.wide_bus_en = dpu_encoder_is_widebus_enabled(phys_enc->parent);

	if (phys_enc->hw_intf->ops.program_intf_cmd_cfg)
		phys_enc->hw_intf->ops.program_intf_cmd_cfg(phys_enc->hw_intf, &cmd_mode_cfg);
}

static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

static void dpu_encoder_phys_cmd_te_rd_ptr_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any thread waiting on the ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	dpu_encoder_underrun_callback(phys_enc->parent, phys_enc);
}

static void dpu_encoder_phys_cmd_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

	phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

	if (phys_enc->has_intf_te)
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_intf->cap->intr_tear_rd_ptr;
	else
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

	phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}

static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log the first timeout and the "dead" one */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d ctl:%d kickoff timeout cnt:%d koff_cnt:%d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		msm_disp_snapshot_state(drm_enc->dev);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_PINGPONG],
			dpu_encoder_phys_cmd_pp_tx_done_irq,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	mutex_lock(&phys_enc->vblank_ctl_lock);
	refcount = phys_enc->vblank_refcount;

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      str_true_false(enable), refcount);

	if (enable) {
		if (phys_enc->vblank_refcount == 0)
			ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_RDPTR],
					dpu_encoder_phys_cmd_te_rd_ptr_irq,
					phys_enc);
		if (!ret)
			phys_enc->vblank_refcount++;
	} else {
		if (phys_enc->vblank_refcount == 1)
			ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_RDPTR]);
		if (!ret)
			phys_enc->vblank_refcount--;
	}

end:
	mutex_unlock(&phys_enc->vblank_ctl_lock);
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  str_true_false(enable), refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_enable(struct dpu_encoder_phys *phys_enc)
{
	trace_dpu_enc_phys_cmd_irq_enable(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  phys_enc->vblank_refcount);

	dpu_core_irq_register_callback(phys_enc->dpu_kms,
				       phys_enc->irq[INTR_IDX_PINGPONG],
				       dpu_encoder_phys_cmd_pp_tx_done_irq,
				       phys_enc);
	dpu_core_irq_register_callback(phys_enc->dpu_kms,
				       phys_enc->irq[INTR_IDX_UNDERRUN],
				       dpu_encoder_phys_cmd_underrun_irq,
				       phys_enc);
	dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

	if (dpu_encoder_phys_cmd_is_master(phys_enc) && phys_enc->irq[INTR_IDX_CTL_START])
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
					       phys_enc->irq[INTR_IDX_CTL_START],
					       dpu_encoder_phys_cmd_ctl_start_irq,
					       phys_enc);
}

static void dpu_encoder_phys_cmd_irq_disable(struct dpu_encoder_phys *phys_enc)
{
	trace_dpu_enc_phys_cmd_irq_disable(DRMID(phys_enc->parent),
					   phys_enc->hw_pp->idx - PINGPONG_0,
					   phys_enc->vblank_refcount);

	if (dpu_encoder_phys_cmd_is_master(phys_enc) && phys_enc->irq[INTR_IDX_CTL_START])
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
						 phys_enc->irq[INTR_IDX_CTL_START]);

	dpu_core_irq_unregister_callback(phys_enc->dpu_kms, phys_enc->irq[INTR_IDX_UNDERRUN]);
	dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
	dpu_core_irq_unregister_callback(phys_enc->dpu_kms, phys_enc->irq[INTR_IDX_PINGPONG]);
}

static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	unsigned long vsync_hz;
	struct dpu_kms *dpu_kms;

	/*
	 * TODO: if/when resource allocation is refactored, move this to a
	 * place where the driver can actually return an error.
	 */
	if (!phys_enc->has_intf_te &&
	    (!phys_enc->hw_pp ||
	     !phys_enc->hw_pp->ops.enable_tearcheck)) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "intf %d pp %d\n",
			 phys_enc->hw_intf ? phys_enc->hw_intf->idx - INTF_0 : -1,
			 phys_enc->hw_pp ? phys_enc->hw_pp->idx - PINGPONG_0 : -1);

	mode = &phys_enc->cached_mode;

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: DSI byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if TE is disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr.
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * LCD panel frequency, divided by the number of rows (lines) in the
	 * LCD panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - no vsync clock\n");
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));
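
	/*
	 * Worked example (illustrative numbers only, not tied to a specific
	 * SoC or panel): with a 19.2 MHz vsync clock, vtotal = 2000 and a
	 * 60 fps mode, vsync_count = 19200000 / (2000 * 60) = 160 vsync
	 * clock ticks per panel line.
	 */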

	/*
	 * Set the sync_cfg_height to twice vtotal so that if we lose a
	 * TE event coming from the display TE pin we won't stall immediately
	 */
	tc_cfg.hw_vsync_mode = 1;
	tc_cfg.sync_cfg_height = mode->vtotal * 2;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;
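
	/*
	 * Note: with the values above the read pointer interrupt fires one
	 * line past the end of the active region (vdisplay + 1); that
	 * interrupt is what drives dpu_encoder_phys_cmd_te_rd_ptr_irq() and
	 * the vblank callback.
	 */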

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc vsync_clk_speed_hz %lu vtotal %u vrefresh %u\n",
		vsync_hz, mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc enable %u start_pos %u rd_ptr_irq %u\n",
		tc_enable, tc_cfg.start_pos, tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		tc_cfg.hw_vsync_mode, tc_cfg.vsync_count,
		tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc cfgheight %u thresh_start %u thresh_cont %u\n",
		tc_cfg.sync_cfg_height, tc_cfg.sync_threshold_start,
		tc_cfg.sync_threshold_continue);

	if (phys_enc->has_intf_te)
		phys_enc->hw_intf->ops.enable_tearcheck(phys_enc->hw_intf, &tc_cfg);
	else
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, &tc_cfg);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * we do a separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_intf->ops.connect_external_te(phys_enc->hw_intf, enable);
	} else {
		if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
	}
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;
	struct dpu_hw_intf *hw_intf;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	if (phys_enc->has_intf_te) {
		hw_intf = phys_enc->hw_intf;
		if (!hw_intf || !hw_intf->ops.get_line_count)
			return -EINVAL;
		return hw_intf->ops.get_line_count(hw_intf);
	}

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp || !hw_pp->ops.get_line_count)
		return -EINVAL;
	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->has_intf_te) {
		DRM_DEBUG_KMS("id:%u intf:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_intf->idx - INTF_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_intf->ops.disable_tearcheck)
			phys_enc->hw_intf->ops.disable_tearcheck(phys_enc->hw_intf);
	} else {
		if (!phys_enc->hw_pp) {
			DPU_ERROR("invalid encoder\n");
			return;
		}

		DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_pp->idx - PINGPONG_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_pp->ops.disable_tearcheck)
			phys_enc->hw_pp->ops.disable_tearcheck(phys_enc->hw_pp);
	}

	if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				PINGPONG_NONE);

		ctl = phys_enc->hw_ctl;
		ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If there is more than one
	 * outstanding request, we have to wait for the previous one to
	 * complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	dpu_encoder_phys_cmd_enable_te(phys_enc);

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf->ops.disable_autorefresh)
			return;

		phys_enc->hw_intf->ops.disable_autorefresh(
				phys_enc->hw_intf,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.disable_autorefresh)
			return;

		phys_enc->hw_pp->ops.disable_autorefresh(
				phys_enc->hw_pp,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	}
}

static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_CTL_START],
			dpu_encoder_phys_cmd_ctl_start_irq,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	}

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->hw_intf->idx - INTF_0);
	}

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return 0;

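	/*
	 * Until the CTL reports that it has started, the first commit is
	 * gated on the CTL_START interrupt; once the CTL is running, commits
	 * are gated on pingpong transfer completion instead.
	 */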
	if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
		return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);

	return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_enable = dpu_encoder_phys_cmd_irq_enable;
	ops->irq_disable = dpu_encoder_phys_cmd_irq_disable;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

/**
 * dpu_encoder_phys_cmd_init - Construct a new command mode physical encoder
 * @dev:  Corresponding device for devres management
 * @p:	Pointer to init params structure
 * Return: Error code or newly allocated encoder
 */
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(struct drm_device *dev,
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;

	DPU_DEBUG("intf\n");

	cmd_enc = drmm_kzalloc(dev, sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;

	dpu_encoder_phys_init(phys_enc, p);

	mutex_init(&phys_enc->vblank_ctl_lock);
	phys_enc->vblank_refcount = 0;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->intf_mode = INTF_MODE_CMD;
	cmd_enc->stream_sel = 0;

	if (!phys_enc->hw_intf) {
		DPU_ERROR_CMDENC(cmd_enc, "no INTF provided\n");
		return ERR_PTR(-EINVAL);
	}

	/* DPU versions before 5.0 use the PINGPONG block for TE handling */
	if (phys_enc->dpu_kms->catalog->mdss_ver->core_major_ver >= 5)
		phys_enc->has_intf_te = true;

	if (phys_enc->has_intf_te && !phys_enc->hw_intf->ops.enable_tearcheck) {
		DPU_ERROR_CMDENC(cmd_enc, "tearcheck not supported\n");
		return ERR_PTR(-EINVAL);
	}

	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}