// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2025 Collabora, Ltd.
 * Author: Shreeya Patel <shreeya.patel@collabora.com>
 * Author: Dmitry Osipenko <dmitry.osipenko@collabora.com>
 *
 * Copyright (c) 2021 Rockchip Electronics Co. Ltd.
 * Author: Dingxian Wen <shawn.wen@rock-chips.com>
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/gpio/consumer.h>
#include <linux/hdmi.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/mfd/syscon.h>
#include <linux/math64.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/of_reserved_mem.h>
#include <linux/pinctrl/consumer.h>
#include <linux/platform_device.h>
#include <linux/property.h>
#include <linux/regmap.h>
#include <linux/reset.h>
#include <linux/v4l2-dv-timings.h>
#include <linux/workqueue.h>

#include <media/cec.h>
#include <media/v4l2-common.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-device.h>
#include <media/v4l2-dv-timings.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-ioctl.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-v4l2.h>

#include "snps_hdmirx.h"
#include "snps_hdmirx_cec.h"

#define EDID_NUM_BLOCKS_MAX				4
#define EDID_BLOCK_SIZE					128
#define HDMIRX_PLANE_Y					0
#define HDMIRX_PLANE_CBCR				1
#define FILTER_FRAME_CNT				6

static int debug;
module_param(debug, int, 0644);
MODULE_PARM_DESC(debug, "debug level (0-3)");

enum hdmirx_pix_fmt {
	HDMIRX_RGB888 = 0,
	HDMIRX_YUV422 = 1,
	HDMIRX_YUV444 = 2,
	HDMIRX_YUV420 = 3,
};

enum ddr_store_fmt {
	STORE_RGB888 = 0,
	STORE_RGBA_ARGB,
	STORE_YUV420_8BIT,
	STORE_YUV420_10BIT,
	STORE_YUV422_8BIT,
	STORE_YUV422_10BIT,
	STORE_YUV444_8BIT,
	STORE_YUV420_16BIT = 8,
	STORE_YUV422_16BIT = 9,
};

enum hdmirx_reg_attr {
	HDMIRX_ATTR_RW = 0,
	HDMIRX_ATTR_RO = 1,
	HDMIRX_ATTR_WO = 2,
	HDMIRX_ATTR_RE = 3,
};

enum {
	HDMIRX_RST_A,
	HDMIRX_RST_P,
	HDMIRX_RST_REF,
	HDMIRX_RST_BIU,
	HDMIRX_NUM_RST,
};

static const char *const pix_fmt_str[] = {
	"RGB888",
	"YUV422",
	"YUV444",
	"YUV420",
};

struct hdmirx_buffer {
	struct vb2_v4l2_buffer vb;
	struct list_head queue;
	u32 buff_addr[VIDEO_MAX_PLANES];
};

struct hdmirx_stream {
	struct snps_hdmirx_dev *hdmirx_dev;
	struct video_device vdev;
	struct vb2_queue buf_queue;
	struct list_head buf_head;
	struct hdmirx_buffer *curr_buf;
	struct hdmirx_buffer *next_buf;
	struct v4l2_pix_format_mplane pixm;
	const struct v4l2_format_info *out_finfo;
	struct mutex vlock; /* to lock resources associated with video buffer and video device */
	spinlock_t vbq_lock; /* to lock video buffer queue */
	bool stopping;
	wait_queue_head_t wq_stopped;
	u32 frame_idx;
	u32 line_flag_int_cnt;
	u32 irq_stat;
};

struct snps_hdmirx_dev {
	struct device *dev;
	struct hdmirx_stream stream;
	struct v4l2_device v4l2_dev;
	struct v4l2_ctrl_handler hdl;
	struct v4l2_ctrl *detect_tx_5v_ctrl;
	struct v4l2_ctrl *rgb_range;
	struct v4l2_ctrl *content_type;
	struct v4l2_dv_timings timings;
	struct gpio_desc *detect_5v_gpio;
	struct delayed_work delayed_work_hotplug;
	struct delayed_work delayed_work_res_change;
	struct hdmirx_cec *cec;
	struct mutex stream_lock; /* to lock video stream capture */
	struct mutex work_lock; /* to lock the critical section of hotplug event */
	struct reset_control_bulk_data resets[HDMIRX_NUM_RST];
	struct clk_bulk_data *clks;
	struct regmap *grf;
	struct regmap *vo1_grf;
	struct completion cr_write_done;
	struct completion timer_base_lock;
	struct completion avi_pkt_rcv;
	struct dentry *debugfs_dir;
	struct v4l2_debugfs_if *infoframes;
	enum hdmirx_pix_fmt pix_fmt;
	void __iomem *regs;
	int hdmi_irq;
	int dma_irq;
	int det_irq;
	bool hpd_trigger_level_high;
	bool tmds_clk_ratio;
	bool plugged;
	int num_clks;
	u32 edid_blocks_written;
	u32 cur_fmt_fourcc;
	u32 color_depth;
	spinlock_t rst_lock; /* to lock register access */
	u8 edid[EDID_NUM_BLOCKS_MAX * EDID_BLOCK_SIZE];
};

static const struct v4l2_dv_timings cea640x480 = V4L2_DV_BT_CEA_640X480P59_94;

static const struct v4l2_dv_timings_cap hdmirx_timings_cap = {
	.type = V4L2_DV_BT_656_1120,
	.reserved = { 0 },
	V4L2_INIT_BT_TIMINGS(640, 4096,			/* min/max width */
			     480, 2160,			/* min/max height */
			     20000000, 600000000,	/* min/max pixelclock */
			     /* standards */
			     V4L2_DV_BT_STD_CEA861,
			     /* capabilities */
			     V4L2_DV_BT_CAP_PROGRESSIVE |
			     V4L2_DV_BT_CAP_INTERLACED)
};

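/*
 * Register access helpers. All MMIO accesses and the DMA reset below are
 * serialized with the rst_lock spinlock, so read-modify-write cycles and
 * hdmirx_reset_dma() cannot interleave with each other.
 */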
static void hdmirx_writel(struct snps_hdmirx_dev *hdmirx_dev, int reg, u32 val)
{
	guard(spinlock_irqsave)(&hdmirx_dev->rst_lock);

	writel(val, hdmirx_dev->regs + reg);
}

static u32 hdmirx_readl(struct snps_hdmirx_dev *hdmirx_dev, int reg)
{
	guard(spinlock_irqsave)(&hdmirx_dev->rst_lock);

	return readl(hdmirx_dev->regs + reg);
}

static void hdmirx_reset_dma(struct snps_hdmirx_dev *hdmirx_dev)
{
	guard(spinlock_irqsave)(&hdmirx_dev->rst_lock);

	reset_control_reset(hdmirx_dev->resets[0].rstc);
}

static void hdmirx_update_bits(struct snps_hdmirx_dev *hdmirx_dev, int reg,
			       u32 mask, u32 data)
{
	u32 val;

	guard(spinlock_irqsave)(&hdmirx_dev->rst_lock);

	val = readl(hdmirx_dev->regs + reg) & ~mask;
	val |= (data & mask);
	writel(val, hdmirx_dev->regs + reg);
}

static int hdmirx_subscribe_event(struct v4l2_fh *fh,
				  const struct v4l2_event_subscription *sub)
{
	switch (sub->type) {
	case V4L2_EVENT_SOURCE_CHANGE:
		return v4l2_src_change_event_subscribe(fh, sub);
	case V4L2_EVENT_CTRL:
		return v4l2_ctrl_subscribe_event(fh, sub);
	default:
		break;
	}

	return -EINVAL;
}

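/*
 * Debounce the 5V detect GPIO: sample it roughly every 1ms and report the
 * cable power as present once at least 7 of up to 10 samples read high.
 */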
static bool tx_5v_power_present(struct snps_hdmirx_dev *hdmirx_dev)
{
	const unsigned int detection_threshold = 7;
	int val, i, cnt = 0;
	bool ret;

	for (i = 0; i < 10; i++) {
		usleep_range(1000, 1100);
		val = gpiod_get_value(hdmirx_dev->detect_5v_gpio);
		if (val > 0)
			cnt++;
		if (cnt >= detection_threshold)
			break;
	}

	ret = cnt >= detection_threshold;
	v4l2_dbg(3, debug, &hdmirx_dev->v4l2_dev, "%s: %d\n", __func__, ret);

	return ret;
}

static bool signal_not_lock(struct snps_hdmirx_dev *hdmirx_dev)
{
	u32 mu_status, dma_st10, cmu_st;

	mu_status = hdmirx_readl(hdmirx_dev, MAINUNIT_STATUS);
	dma_st10 = hdmirx_readl(hdmirx_dev, DMA_STATUS10);
	cmu_st = hdmirx_readl(hdmirx_dev, CMU_STATUS);

	if ((mu_status & TMDSVALID_STABLE_ST) &&
	    (dma_st10 & HDMIRX_LOCK) &&
	    (cmu_st & TMDSQPCLK_LOCKED_ST))
		return false;

	return true;
}

static void hdmirx_get_timings(struct snps_hdmirx_dev *hdmirx_dev,
			       struct v4l2_bt_timings *bt)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	u32 hact, vact, htotal, vtotal, fps;
	u32 hfp, hs, hbp, vfp, vs, vbp;
	u32 val;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS2);
	hact = (val >> 16) & 0xffff;
	vact = val & 0xffff;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS3);
	htotal = (val >> 16) & 0xffff;
	vtotal = val & 0xffff;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS4);
	hs = (val >> 16) & 0xffff;
	vs = val & 0xffff;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS5);
	hbp = (val >> 16) & 0xffff;
	vbp = val & 0xffff;

	if (hdmirx_dev->pix_fmt == HDMIRX_YUV420) {
		htotal *= 2;
		hbp *= 2;
		hs *= 2;
	}

	hfp = htotal - hact - hs - hbp;
	vfp = vtotal - vact - vs - vbp;

	fps = div_u64(bt->pixelclock + (htotal * vtotal) / 2, htotal * vtotal);
	bt->width = hact;
	bt->height = vact;
	bt->hfrontporch = hfp;
	bt->hsync = hs;
	bt->hbackporch = hbp;
	bt->vfrontporch = vfp;
	bt->vsync = vs;
	bt->vbackporch = vbp;

	v4l2_dbg(1, debug, v4l2_dev, "get timings from dma\n");
	v4l2_dbg(1, debug, v4l2_dev,
		 "act:%ux%u%s, total:%ux%u, fps:%u, pixclk:%llu\n",
		 bt->width, bt->height, bt->interlaced ? "i" : "p",
		 htotal, vtotal, fps, bt->pixelclock);

	v4l2_dbg(2, debug, v4l2_dev,
		 "hfp:%u, hact:%u, hs:%u, hbp:%u, vfp:%u, vact:%u, vs:%u, vbp:%u\n",
		 bt->hfrontporch, hact, bt->hsync, bt->hbackporch,
		 bt->vfrontporch, vact, bt->vsync, bt->vbackporch);

	if (bt->interlaced == V4L2_DV_INTERLACED) {
		bt->height *= 2;
		bt->il_vfrontporch = bt->vfrontporch;
		bt->il_vsync = bt->vsync + 1;
		bt->il_vbackporch = bt->vbackporch;
	}
}

static bool hdmirx_check_timing_valid(struct v4l2_bt_timings *bt)
{
	/*
	 * Sanity-check timing values. Some of the values will be outside
	 * of a valid range until the hardware becomes ready to perform capture.
	 */
	if (bt->width < 100 || bt->width > 5000 ||
	    bt->height < 100 || bt->height > 5000)
		return false;

	if (!bt->hsync || bt->hsync > 200 ||
	    !bt->vsync || bt->vsync > 100)
		return false;

	/*
	 * According to CEA-861, the 1280x720p25 Hblank timing is up to 2680,
	 * and all standard video format timings are below 3000.
	 */
	if (!bt->hbackporch || bt->hbackporch > 3000 ||
	    !bt->vbackporch || bt->vbackporch > 3000)
		return false;

	if (!bt->hfrontporch || bt->hfrontporch > 3000 ||
	    !bt->vfrontporch || bt->vfrontporch > 3000)
		return false;

	return true;
}

static void hdmirx_toggle_polarity(struct snps_hdmirx_dev *hdmirx_dev)
{
	u32 val = hdmirx_readl(hdmirx_dev, DMA_CONFIG6);

	if (!(val & (VSYNC_TOGGLE_EN | HSYNC_TOGGLE_EN))) {
		hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6,
				   VSYNC_TOGGLE_EN | HSYNC_TOGGLE_EN,
				   VSYNC_TOGGLE_EN | HSYNC_TOGGLE_EN);
		hdmirx_update_bits(hdmirx_dev, VIDEO_CONFIG2,
				   VPROC_VSYNC_POL_OVR_VALUE |
				   VPROC_VSYNC_POL_OVR_EN |
				   VPROC_HSYNC_POL_OVR_VALUE |
				   VPROC_HSYNC_POL_OVR_EN,
				   VPROC_VSYNC_POL_OVR_EN |
				   VPROC_HSYNC_POL_OVR_EN);
		return;
	}

	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6,
			   VSYNC_TOGGLE_EN | HSYNC_TOGGLE_EN, 0);

	hdmirx_update_bits(hdmirx_dev, VIDEO_CONFIG2,
			   VPROC_VSYNC_POL_OVR_VALUE |
			   VPROC_VSYNC_POL_OVR_EN |
			   VPROC_HSYNC_POL_OVR_VALUE |
			   VPROC_HSYNC_POL_OVR_EN, 0);
}

/*
 * When querying DV timings during preview, if the DMA's timing is stable,
 * we retrieve the timings directly from the DMA. However, if the current
 * resolution is negative, obtaining the timing from CTRL may require a
 * change in the sync polarity, potentially leading to DMA errors.
 */
static int hdmirx_get_detected_timings(struct snps_hdmirx_dev *hdmirx_dev,
				       struct v4l2_dv_timings *timings)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	struct v4l2_bt_timings *bt = &timings->bt;
	u32 val, tmdsqpclk_freq, pix_clk;
	unsigned int num_retries = 0;
	u32 field_type, deframer_st;
	u64 tmp_data, tmds_clk;
	bool is_dvi_mode;
	int ret;

	mutex_lock(&hdmirx_dev->work_lock);
retry:
	memset(timings, 0, sizeof(struct v4l2_dv_timings));
	timings->type = V4L2_DV_BT_656_1120;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS11);
	field_type = (val & HDMIRX_TYPE_MASK) >> 7;

	if (field_type & BIT(0))
		bt->interlaced = V4L2_DV_INTERLACED;
	else
		bt->interlaced = V4L2_DV_PROGRESSIVE;

	deframer_st = hdmirx_readl(hdmirx_dev, DEFRAMER_STATUS);
	is_dvi_mode = !(deframer_st & OPMODE_STS_MASK);

	tmdsqpclk_freq = hdmirx_readl(hdmirx_dev, CMU_TMDSQPCLK_FREQ);
	tmds_clk = tmdsqpclk_freq * 4 * 1000;
	tmp_data = tmds_clk * 24;
	do_div(tmp_data, hdmirx_dev->color_depth);
	pix_clk = tmp_data;
	bt->pixelclock = pix_clk;

	if (hdmirx_dev->pix_fmt == HDMIRX_YUV420)
		bt->pixelclock *= 2;

	hdmirx_get_timings(hdmirx_dev, bt);

	v4l2_dbg(2, debug, v4l2_dev, "tmds_clk:%llu, pix_clk:%d\n", tmds_clk, pix_clk);
	v4l2_dbg(1, debug, v4l2_dev, "interlace:%d, fmt:%d, color:%d, mode:%s\n",
		 bt->interlaced, hdmirx_dev->pix_fmt,
		 hdmirx_dev->color_depth,
		 is_dvi_mode ? "dvi" : "hdmi");
	v4l2_dbg(2, debug, v4l2_dev, "deframer_st:%#x\n", deframer_st);

	/*
	 * Timing will be invalid until it's latched by HW or if signal's
	 * polarity doesn't match.
	 */
	if (!hdmirx_check_timing_valid(bt)) {
		if (num_retries++ < 20) {
			if (num_retries == 10)
				hdmirx_toggle_polarity(hdmirx_dev);

			usleep_range(10 * 1000, 10 * 1100);
			goto retry;
		}

		ret = -ERANGE;
	} else {
		ret = 0;
	}

	mutex_unlock(&hdmirx_dev->work_lock);

	return ret;
}

static bool port_no_link(struct snps_hdmirx_dev *hdmirx_dev)
{
	return !tx_5v_power_present(hdmirx_dev);
}

static int hdmirx_query_dv_timings(struct file *file, void *_fh,
				   struct v4l2_dv_timings *timings)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	int ret;

	if (port_no_link(hdmirx_dev)) {
		v4l2_err(v4l2_dev, "%s: port has no link\n", __func__);
		return -ENOLINK;
	}

	if (signal_not_lock(hdmirx_dev)) {
		v4l2_err(v4l2_dev, "%s: signal is not locked\n", __func__);
		return -ENOLCK;
	}

	ret = hdmirx_get_detected_timings(hdmirx_dev, timings);
	if (ret)
		return ret;

	if (debug)
		v4l2_print_dv_timings(hdmirx_dev->v4l2_dev.name,
				      "query_dv_timings: ", timings, false);

	if (!v4l2_valid_dv_timings(timings, &hdmirx_timings_cap, NULL, NULL)) {
		v4l2_dbg(1, debug, v4l2_dev, "%s: timings out of range\n", __func__);
		return -ERANGE;
	}

	return 0;
}

static void hdmirx_hpd_ctrl(struct snps_hdmirx_dev *hdmirx_dev, bool en)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;

	v4l2_dbg(1, debug, v4l2_dev, "%s: %sable, hpd_trigger_level_high:%d\n",
		 __func__, en ? "en" : "dis", hdmirx_dev->hpd_trigger_level_high);

	hdmirx_update_bits(hdmirx_dev, SCDC_CONFIG, HPDLOW, en ? 0 : HPDLOW);
	hdmirx_writel(hdmirx_dev, CORE_CONFIG,
		      hdmirx_dev->hpd_trigger_level_high ? en : !en);

	/* 100ms delay as per HDMI spec */
	if (!en)
		msleep(100);
}

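/*
 * Load the EDID into the controller's EDID RAM through DMA_CONFIG10/11 and
 * update the CEC physical address. With debug >= 2 the EDID is read back
 * and dumped so the written data can be verified.
 */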
static void hdmirx_write_edid_data(struct snps_hdmirx_dev *hdmirx_dev,
				   u8 *edid, unsigned int num_blocks)
{
	static u8 data[EDID_NUM_BLOCKS_MAX * EDID_BLOCK_SIZE];
	unsigned int edid_len = num_blocks * EDID_BLOCK_SIZE;
	unsigned int i;

	cec_s_phys_addr_from_edid(hdmirx_dev->cec->adap,
				  (const struct edid *)edid);

	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG11,
			   EDID_READ_EN_MASK |
			   EDID_WRITE_EN_MASK |
			   EDID_SLAVE_ADDR_MASK,
			   EDID_READ_EN(0) |
			   EDID_WRITE_EN(1) |
			   EDID_SLAVE_ADDR(0x50));
	for (i = 0; i < edid_len; i++)
		hdmirx_writel(hdmirx_dev, DMA_CONFIG10, edid[i]);

	/* read out for debug */
	if (debug >= 2) {
		hdmirx_update_bits(hdmirx_dev, DMA_CONFIG11,
				   EDID_READ_EN_MASK |
				   EDID_WRITE_EN_MASK,
				   EDID_READ_EN(1) |
				   EDID_WRITE_EN(0));

		for (i = 0; i < edid_len; i++)
			data[i] = hdmirx_readl(hdmirx_dev, DMA_STATUS14);

		print_hex_dump(KERN_INFO, "", DUMP_PREFIX_NONE, 16, 1, data,
			       edid_len, false);
	}

	/*
	 * The EDID_READ_EN and EDID_WRITE_EN bits must be cleared once the
	 * EDID read/write operation is completed. Otherwise, they interfere
	 * with the reading and writing of other registers.
	 */
	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG11,
			   EDID_READ_EN_MASK | EDID_WRITE_EN_MASK,
			   EDID_READ_EN(0) | EDID_WRITE_EN(0));
}

static void hdmirx_write_edid(struct snps_hdmirx_dev *hdmirx_dev,
			      struct v4l2_edid *edid)
{
	memset(edid->reserved, 0, sizeof(edid->reserved));
	memset(hdmirx_dev->edid, 0, sizeof(hdmirx_dev->edid));

	hdmirx_write_edid_data(hdmirx_dev, edid->edid, edid->blocks);

	hdmirx_dev->edid_blocks_written = edid->blocks;
	memcpy(hdmirx_dev->edid, edid->edid, edid->blocks * EDID_BLOCK_SIZE);
}

/*
 * Before clearing an interrupt, we need to read the interrupt status.
 */
static inline void hdmirx_clear_interrupt(struct snps_hdmirx_dev *hdmirx_dev,
					  u32 reg, u32 val)
{
	/* (interrupt status register) = (interrupt clear register) - 0x8 */
	hdmirx_readl(hdmirx_dev, reg - 0x8);
	hdmirx_writel(hdmirx_dev, reg, val);
}

static void hdmirx_interrupts_setup(struct snps_hdmirx_dev *hdmirx_dev, bool en)
{
	v4l2_dbg(1, debug, &hdmirx_dev->v4l2_dev, "%s: %sable\n",
		 __func__, en ? "en" : "dis");

	disable_irq(hdmirx_dev->hdmi_irq);

	/* Note: In DVI mode, it needs to be written twice to take effect. */
	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_0_INT_CLEAR, 0xffffffff);
	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_2_INT_CLEAR, 0xffffffff);
	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_0_INT_CLEAR, 0xffffffff);
	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_2_INT_CLEAR, 0xffffffff);
	hdmirx_clear_interrupt(hdmirx_dev, AVPUNIT_0_INT_CLEAR, 0xffffffff);

	if (en) {
		hdmirx_update_bits(hdmirx_dev, MAINUNIT_0_INT_MASK_N,
				   TMDSQPCLK_OFF_CHG | TMDSQPCLK_LOCKED_CHG,
				   TMDSQPCLK_OFF_CHG | TMDSQPCLK_LOCKED_CHG);
		hdmirx_update_bits(hdmirx_dev, MAINUNIT_2_INT_MASK_N,
				   TMDSVALID_STABLE_CHG, TMDSVALID_STABLE_CHG);
		hdmirx_update_bits(hdmirx_dev, AVPUNIT_0_INT_MASK_N,
				   CED_DYN_CNT_CH2_IRQ |
				   CED_DYN_CNT_CH1_IRQ |
				   CED_DYN_CNT_CH0_IRQ,
				   CED_DYN_CNT_CH2_IRQ |
				   CED_DYN_CNT_CH1_IRQ |
				   CED_DYN_CNT_CH0_IRQ);
	} else {
		hdmirx_writel(hdmirx_dev, MAINUNIT_0_INT_MASK_N, 0);
		hdmirx_writel(hdmirx_dev, MAINUNIT_2_INT_MASK_N, 0);
		hdmirx_writel(hdmirx_dev, AVPUNIT_0_INT_MASK_N, 0);
	}

	enable_irq(hdmirx_dev->hdmi_irq);
}

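/*
 * Tear-down path for cable unplug: drop the SCDC power status, mask the
 * controller and DMA interrupts, stop and reset the DMA, power down the PHY
 * and reset the reported RGB range and content type controls to their
 * defaults.
 */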
static void hdmirx_plugout(struct snps_hdmirx_dev *hdmirx_dev)
{
	if (!hdmirx_dev->plugged)
		return;

	hdmirx_update_bits(hdmirx_dev, SCDC_CONFIG, POWERPROVIDED, 0);
	hdmirx_interrupts_setup(hdmirx_dev, false);
	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6, HDMIRX_DMA_EN, 0);
	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG4,
			   LINE_FLAG_INT_EN |
			   HDMIRX_DMA_IDLE_INT |
			   HDMIRX_LOCK_DISABLE_INT |
			   LAST_FRAME_AXI_UNFINISH_INT_EN |
			   FIFO_OVERFLOW_INT_EN |
			   FIFO_UNDERFLOW_INT_EN |
			   HDMIRX_AXI_ERROR_INT_EN, 0);
	hdmirx_reset_dma(hdmirx_dev);
	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, HDMI_DISABLE | PHY_RESET |
			   PHY_PDDQ, HDMI_DISABLE);
	hdmirx_writel(hdmirx_dev, PHYCREG_CONFIG0, 0x0);
	cancel_delayed_work(&hdmirx_dev->delayed_work_res_change);

	/* will be NULL on driver removal */
	if (hdmirx_dev->rgb_range)
		v4l2_ctrl_s_ctrl(hdmirx_dev->rgb_range, V4L2_DV_RGB_RANGE_AUTO);

	if (hdmirx_dev->content_type)
		v4l2_ctrl_s_ctrl(hdmirx_dev->content_type,
				 V4L2_DV_IT_CONTENT_TYPE_NO_ITC);

	hdmirx_dev->plugged = false;
}

static int hdmirx_set_edid(struct file *file, void *fh, struct v4l2_edid *edid)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	u16 phys_addr;
	int err;

	if (edid->pad)
		return -EINVAL;

	if (edid->start_block)
		return -EINVAL;

	if (edid->blocks > EDID_NUM_BLOCKS_MAX) {
		edid->blocks = EDID_NUM_BLOCKS_MAX;
		return -E2BIG;
	}

	if (edid->blocks) {
		phys_addr = cec_get_edid_phys_addr(edid->edid,
						   edid->blocks * EDID_BLOCK_SIZE,
						   NULL);

		err = v4l2_phys_addr_validate(phys_addr, &phys_addr, NULL);
		if (err)
			return err;
	}

	/*
	 * Touching HW registers in parallel with the plug-in/plug-out
	 * handlers would bring the hardware into a bad state.
	 */
	mutex_lock(&hdmirx_dev->work_lock);

	hdmirx_hpd_ctrl(hdmirx_dev, false);

	if (edid->blocks) {
		hdmirx_write_edid(hdmirx_dev, edid);
		hdmirx_hpd_ctrl(hdmirx_dev, true);
	} else {
		cec_phys_addr_invalidate(hdmirx_dev->cec->adap);
		hdmirx_dev->edid_blocks_written = 0;
	}

	mutex_unlock(&hdmirx_dev->work_lock);

	return 0;
}

static int hdmirx_get_edid(struct file *file, void *fh, struct v4l2_edid *edid)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;

	memset(edid->reserved, 0, sizeof(edid->reserved));

	if (edid->pad)
		return -EINVAL;

	if (!edid->start_block && !edid->blocks) {
		edid->blocks = hdmirx_dev->edid_blocks_written;
		return 0;
	}

	if (!hdmirx_dev->edid_blocks_written)
		return -ENODATA;

	if (edid->start_block >= hdmirx_dev->edid_blocks_written || !edid->blocks)
		return -EINVAL;

	if (edid->start_block + edid->blocks > hdmirx_dev->edid_blocks_written)
		edid->blocks = hdmirx_dev->edid_blocks_written - edid->start_block;

	memcpy(edid->edid, hdmirx_dev->edid, edid->blocks * EDID_BLOCK_SIZE);

	v4l2_dbg(1, debug, v4l2_dev, "%s: read EDID:\n", __func__);
	if (debug > 0)
		print_hex_dump(KERN_INFO, "", DUMP_PREFIX_NONE, 16, 1,
			       edid->edid, edid->blocks * EDID_BLOCK_SIZE, false);

	return 0;
}

static int hdmirx_g_parm(struct file *file, void *priv,
			 struct v4l2_streamparm *parm)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;

	if (parm->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		return -EINVAL;

	parm->parm.capture.timeperframe = v4l2_calc_timeperframe(&hdmirx_dev->timings);

	return 0;
}

static int hdmirx_dv_timings_cap(struct file *file, void *fh,
				 struct v4l2_dv_timings_cap *cap)
{
	*cap = hdmirx_timings_cap;
	return 0;
}

static int hdmirx_enum_dv_timings(struct file *file, void *_fh,
				  struct v4l2_enum_dv_timings *timings)
{
	return v4l2_enum_dv_timings_cap(timings, &hdmirx_timings_cap, NULL, NULL);
}

static void hdmirx_scdc_init(struct snps_hdmirx_dev *hdmirx_dev)
{
	hdmirx_update_bits(hdmirx_dev, I2C_SLAVE_CONFIG1,
			   I2C_SDA_OUT_HOLD_VALUE_QST_MASK |
			   I2C_SDA_IN_HOLD_VALUE_QST_MASK,
			   I2C_SDA_OUT_HOLD_VALUE_QST(0x80) |
			   I2C_SDA_IN_HOLD_VALUE_QST(0x15));
	hdmirx_update_bits(hdmirx_dev, SCDC_REGBANK_CONFIG0,
			   SCDC_SINKVERSION_QST_MASK,
			   SCDC_SINKVERSION_QST(1));
}

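/*
 * Poll a register (a GRF register via regmap or a controller register) in
 * ~1ms steps for up to @ms milliseconds until the masked bits match
 * @expect_val. Returns 0 on success and -1 on timeout.
 */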
static int wait_reg_bit_status(struct snps_hdmirx_dev *hdmirx_dev, u32 reg,
			       u32 bit_mask, u32 expect_val, bool is_grf,
			       u32 ms)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	u32 i, val;

	for (i = 0; i < ms; i++) {
		if (is_grf)
			regmap_read(hdmirx_dev->grf, reg, &val);
		else
			val = hdmirx_readl(hdmirx_dev, reg);

		if ((val & bit_mask) == expect_val) {
			v4l2_dbg(2, debug, v4l2_dev,
				 "%s:  i:%d, time: %dms\n", __func__, i, ms);
			break;
		}
		usleep_range(1000, 1010);
	}

	if (i == ms)
		return -1;

	return 0;
}

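/*
 * Indirect write to a PHY CR register: program the address and value in
 * PHYCREG_CONFIG1/2, trigger the write and wait up to 20ms for the
 * cr_write_done completion signalled by the CR_WRITE_DONE interrupt.
 */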
static int hdmirx_phy_register_write(struct snps_hdmirx_dev *hdmirx_dev,
				     u32 phy_reg, u32 val)
{
	struct device *dev = hdmirx_dev->dev;

	reinit_completion(&hdmirx_dev->cr_write_done);
	/* clear irq status */
	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_2_INT_CLEAR, 0xffffffff);
	/* en irq */
	hdmirx_update_bits(hdmirx_dev, MAINUNIT_2_INT_MASK_N,
			   PHYCREG_CR_WRITE_DONE, PHYCREG_CR_WRITE_DONE);
	/* write phy reg addr */
	hdmirx_writel(hdmirx_dev, PHYCREG_CONFIG1, phy_reg);
	/* write phy reg val */
	hdmirx_writel(hdmirx_dev, PHYCREG_CONFIG2, val);
	/* config write enable */
	hdmirx_writel(hdmirx_dev, PHYCREG_CONTROL, PHYCREG_CR_PARA_WRITE_P);

	if (!wait_for_completion_timeout(&hdmirx_dev->cr_write_done,
					 msecs_to_jiffies(20))) {
		dev_err(dev, "%s wait cr write done failed\n", __func__);
		return -1;
	}

	return 0;
}

static void hdmirx_tmds_clk_ratio_config(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	u32 val;

	val = hdmirx_readl(hdmirx_dev, SCDC_REGBANK_STATUS1);
	v4l2_dbg(3, debug, v4l2_dev, "%s: scdc_regbank_st:%#x\n", __func__, val);
	hdmirx_dev->tmds_clk_ratio = (val & SCDC_TMDSBITCLKRATIO) > 0;

	if (hdmirx_dev->tmds_clk_ratio) {
		v4l2_dbg(3, debug, v4l2_dev, "%s: HDMITX greater than 3.4Gbps\n", __func__);
		hdmirx_update_bits(hdmirx_dev, PHY_CONFIG,
				   TMDS_CLOCK_RATIO, TMDS_CLOCK_RATIO);
	} else {
		v4l2_dbg(3, debug, v4l2_dev, "%s: HDMITX less than 3.4Gbps\n", __func__);
		hdmirx_update_bits(hdmirx_dev, PHY_CONFIG,
				   TMDS_CLOCK_RATIO, 0);
	}
}

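/*
 * PHY bring-up sequence as programmed below: reset the PHY, wait for the
 * SRAM initialization reported through the GRF, load the CDR boundary
 * settings over the CR interface and finally release PDDQ/HDMI_DISABLE
 * before applying the TMDS clock ratio read back from SCDC.
 */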
static void hdmirx_phy_config(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct device *dev = hdmirx_dev->dev;

	hdmirx_clear_interrupt(hdmirx_dev, SCDC_INT_CLEAR, 0xffffffff);
	hdmirx_update_bits(hdmirx_dev, SCDC_INT_MASK_N, SCDCTMDSCCFG_CHG,
			   SCDCTMDSCCFG_CHG);
	/* cr_para_clk 24M */
	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, REFFREQ_SEL_MASK, REFFREQ_SEL(0));
	/* rx data width 40bit valid */
	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, RXDATA_WIDTH, RXDATA_WIDTH);
	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, PHY_RESET, PHY_RESET);
	usleep_range(100, 110);
	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, PHY_RESET, 0);
	usleep_range(100, 110);
	/* select cr para interface */
	hdmirx_writel(hdmirx_dev, PHYCREG_CONFIG0, 0x3);

	if (wait_reg_bit_status(hdmirx_dev, SYS_GRF_SOC_STATUS1,
				HDMIRXPHY_SRAM_INIT_DONE,
				HDMIRXPHY_SRAM_INIT_DONE, true, 10))
		dev_err(dev, "%s: phy SRAM init failed\n", __func__);

	regmap_write(hdmirx_dev->grf, SYS_GRF_SOC_CON1,
		     (HDMIRXPHY_SRAM_EXT_LD_DONE << 16) |
		     HDMIRXPHY_SRAM_EXT_LD_DONE);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 2);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 3);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 2);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 2);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 3);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 2);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 0);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 1);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 0);
	hdmirx_phy_register_write(hdmirx_dev, SUP_DIG_ANA_CREGS_SUP_ANA_NC, 0);

	hdmirx_phy_register_write(hdmirx_dev,
				  HDMIPCS_DIG_CTRL_PATH_MAIN_FSM_RATE_CALC_HDMI14_CDR_SETTING_3_REG,
				  CDR_SETTING_BOUNDARY_3_DEFAULT);
	hdmirx_phy_register_write(hdmirx_dev,
				  HDMIPCS_DIG_CTRL_PATH_MAIN_FSM_RATE_CALC_HDMI14_CDR_SETTING_4_REG,
				  CDR_SETTING_BOUNDARY_4_DEFAULT);
	hdmirx_phy_register_write(hdmirx_dev,
				  HDMIPCS_DIG_CTRL_PATH_MAIN_FSM_RATE_CALC_HDMI14_CDR_SETTING_5_REG,
				  CDR_SETTING_BOUNDARY_5_DEFAULT);
	hdmirx_phy_register_write(hdmirx_dev,
				  HDMIPCS_DIG_CTRL_PATH_MAIN_FSM_RATE_CALC_HDMI14_CDR_SETTING_6_REG,
				  CDR_SETTING_BOUNDARY_6_DEFAULT);
	hdmirx_phy_register_write(hdmirx_dev,
				  HDMIPCS_DIG_CTRL_PATH_MAIN_FSM_RATE_CALC_HDMI14_CDR_SETTING_7_REG,
				  CDR_SETTING_BOUNDARY_7_DEFAULT);

	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, PHY_PDDQ, 0);
	if (wait_reg_bit_status(hdmirx_dev, PHY_STATUS, PDDQ_ACK, 0, false, 10))
		dev_err(dev, "%s: wait pddq ack failed\n", __func__);

	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, HDMI_DISABLE, 0);
	if (wait_reg_bit_status(hdmirx_dev, PHY_STATUS, HDMI_DISABLE_ACK, 0,
				false, 50))
		dev_err(dev, "%s: wait hdmi disable ack failed\n", __func__);

	hdmirx_tmds_clk_ratio_config(hdmirx_dev);
}

static void hdmirx_controller_init(struct snps_hdmirx_dev *hdmirx_dev)
{
	const unsigned long iref_clk_freq_hz = 428571429;
	struct device *dev = hdmirx_dev->dev;

	reinit_completion(&hdmirx_dev->timer_base_lock);
	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_0_INT_CLEAR, 0xffffffff);
	/* en irq */
	hdmirx_update_bits(hdmirx_dev, MAINUNIT_0_INT_MASK_N,
			   TIMER_BASE_LOCKED_IRQ, TIMER_BASE_LOCKED_IRQ);
	/* write irefclk freq */
	hdmirx_writel(hdmirx_dev, GLOBAL_TIMER_REF_BASE, iref_clk_freq_hz);

	if (!wait_for_completion_timeout(&hdmirx_dev->timer_base_lock,
					 msecs_to_jiffies(20)))
		dev_err(dev, "%s wait timer base lock failed\n", __func__);

	hdmirx_update_bits(hdmirx_dev, CMU_CONFIG0,
			   TMDSQPCLK_STABLE_FREQ_MARGIN_MASK |
			   AUDCLK_STABLE_FREQ_MARGIN_MASK,
			   TMDSQPCLK_STABLE_FREQ_MARGIN(2) |
			   AUDCLK_STABLE_FREQ_MARGIN(1));
	hdmirx_update_bits(hdmirx_dev, DESCRAND_EN_CONTROL,
			   SCRAMB_EN_SEL_QST_MASK, SCRAMB_EN_SEL_QST(1));
	hdmirx_update_bits(hdmirx_dev, CED_CONFIG,
			   CED_VIDDATACHECKEN_QST |
			   CED_DATAISCHECKEN_QST |
			   CED_GBCHECKEN_QST |
			   CED_CTRLCHECKEN_QST |
			   CED_CHLOCKMAXER_QST_MASK,
			   CED_VIDDATACHECKEN_QST |
			   CED_GBCHECKEN_QST |
			   CED_CTRLCHECKEN_QST |
			   CED_CHLOCKMAXER_QST(0x10));
	hdmirx_update_bits(hdmirx_dev, DEFRAMER_CONFIG0,
			   VS_REMAPFILTER_EN_QST | VS_FILTER_ORDER_QST_MASK,
			   VS_REMAPFILTER_EN_QST | VS_FILTER_ORDER_QST(0x3));
}

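/*
 * Decode the color depth field of DMA_STATUS11 into bits per pixel
 * (24/30/36/48), falling back to 24 for unknown register values.
 */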
static void hdmirx_get_colordepth(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	u32 val, color_depth_reg;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS11);
	color_depth_reg = (val & HDMIRX_COLOR_DEPTH_MASK) >> 3;

	switch (color_depth_reg) {
	case 0x4:
		hdmirx_dev->color_depth = 24;
		break;
	case 0x5:
		hdmirx_dev->color_depth = 30;
		break;
	case 0x6:
		hdmirx_dev->color_depth = 36;
		break;
	case 0x7:
		hdmirx_dev->color_depth = 48;
		break;
	default:
		hdmirx_dev->color_depth = 24;
		break;
	}

	v4l2_dbg(1, debug, v4l2_dev, "%s: color_depth: %d, reg_val:%d\n",
		 __func__, hdmirx_dev->color_depth, color_depth_reg);
}

static void hdmirx_get_pix_fmt(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	u32 val;

	val = hdmirx_readl(hdmirx_dev, DMA_STATUS11);
	hdmirx_dev->pix_fmt = val & HDMIRX_FORMAT_MASK;

	switch (hdmirx_dev->pix_fmt) {
	case HDMIRX_RGB888:
		hdmirx_dev->cur_fmt_fourcc = V4L2_PIX_FMT_BGR24;
		break;
	case HDMIRX_YUV422:
		hdmirx_dev->cur_fmt_fourcc = V4L2_PIX_FMT_NV16;
		break;
	case HDMIRX_YUV444:
		hdmirx_dev->cur_fmt_fourcc = V4L2_PIX_FMT_NV24;
		break;
	case HDMIRX_YUV420:
		hdmirx_dev->cur_fmt_fourcc = V4L2_PIX_FMT_NV12;
		break;
	default:
		v4l2_err(v4l2_dev,
			 "%s: err pix_fmt: %d, set RGB888 as default\n",
			 __func__, hdmirx_dev->pix_fmt);
		hdmirx_dev->pix_fmt = HDMIRX_RGB888;
		hdmirx_dev->cur_fmt_fourcc = V4L2_PIX_FMT_BGR24;
		break;
	}

	v4l2_dbg(1, debug, v4l2_dev, "%s: pix_fmt: %s\n", __func__,
		 pix_fmt_str[hdmirx_dev->pix_fmt]);
}

static void hdmirx_read_avi_infoframe(struct snps_hdmirx_dev *hdmirx_dev,
				      u8 *aviif)
{
	unsigned int i, b, itr = 0;
	u32 val;

	aviif[itr++] = HDMI_INFOFRAME_TYPE_AVI;
	val = hdmirx_readl(hdmirx_dev, PKTDEC_AVIIF_PH2_1);
	aviif[itr++] = val & 0xff;
	aviif[itr++] = (val >> 8) & 0xff;

	for (i = 0; i < 7; i++) {
		val = hdmirx_readl(hdmirx_dev, PKTDEC_AVIIF_PB3_0 + 4 * i);

		for (b = 0; b < 4; b++)
			aviif[itr++] = (val >> (8 * b)) & 0xff;
	}
}

static void hdmirx_get_avi_infoframe(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	union hdmi_infoframe frame = {};
	u8 aviif[3 + 7 * 4];
	int err;

	hdmirx_read_avi_infoframe(hdmirx_dev, aviif);

	err = hdmi_infoframe_unpack(&frame, aviif, sizeof(aviif));
	if (err) {
		v4l2_err(v4l2_dev, "failed to unpack AVI infoframe\n");
		return;
	}

	v4l2_ctrl_s_ctrl(hdmirx_dev->rgb_range, frame.avi.quantization_range);

	if (frame.avi.itc)
		v4l2_ctrl_s_ctrl(hdmirx_dev->content_type,
				 frame.avi.content_type);
	else
		v4l2_ctrl_s_ctrl(hdmirx_dev->content_type,
				 V4L2_DV_IT_CONTENT_TYPE_NO_ITC);
}

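/*
 * Debugfs infoframe read hook: only the AVI infoframe is exposed. The
 * returned length is the infoframe length byte plus the four header bytes.
 */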
static ssize_t
hdmirx_debugfs_if_read(u32 type, void *priv, struct file *filp,
		       char __user *ubuf, size_t count, loff_t *ppos)
{
	struct snps_hdmirx_dev *hdmirx_dev = priv;
	u8 aviif[V4L2_DEBUGFS_IF_MAX_LEN] = {};
	int len;

	if (type != V4L2_DEBUGFS_IF_AVI)
		return 0;

	hdmirx_read_avi_infoframe(hdmirx_dev, aviif);

	len = aviif[2] + 4;
	if (len > V4L2_DEBUGFS_IF_MAX_LEN)
		len = -ENOENT;
	else
		len = simple_read_from_buffer(ubuf, count, ppos, aviif, len);

	return len < 0 ? 0 : len;
}

static void hdmirx_format_change(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct hdmirx_stream *stream = &hdmirx_dev->stream;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	static const struct v4l2_event ev_src_chg = {
		.type = V4L2_EVENT_SOURCE_CHANGE,
		.u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
	};

	hdmirx_get_pix_fmt(hdmirx_dev);
	hdmirx_get_colordepth(hdmirx_dev);
	hdmirx_get_avi_infoframe(hdmirx_dev);

	v4l2_dbg(1, debug, v4l2_dev, "%s: queue res_chg_event\n", __func__);
	v4l2_event_queue(&stream->vdev, &ev_src_chg);
}

static void hdmirx_set_ddr_store_fmt(struct snps_hdmirx_dev *hdmirx_dev)
{
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	enum ddr_store_fmt store_fmt;
	u32 dma_cfg1;

	switch (hdmirx_dev->pix_fmt) {
	case HDMIRX_RGB888:
		store_fmt = STORE_RGB888;
		break;
	case HDMIRX_YUV444:
		store_fmt = STORE_YUV444_8BIT;
		break;
	case HDMIRX_YUV422:
		store_fmt = STORE_YUV422_8BIT;
		break;
	case HDMIRX_YUV420:
		store_fmt = STORE_YUV420_8BIT;
		break;
	default:
		store_fmt = STORE_RGB888;
		break;
	}

	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG1,
			   DDR_STORE_FORMAT_MASK, DDR_STORE_FORMAT(store_fmt));
	dma_cfg1 = hdmirx_readl(hdmirx_dev, DMA_CONFIG1);
	v4l2_dbg(1, debug, v4l2_dev, "%s: pix_fmt: %s, DMA_CONFIG1:%#x\n",
		 __func__, pix_fmt_str[hdmirx_dev->pix_fmt], dma_cfg1);
}

static void hdmirx_dma_config(struct snps_hdmirx_dev *hdmirx_dev)
{
	hdmirx_set_ddr_store_fmt(hdmirx_dev);

	/* Note: uv_swap, rb can not swap, doc err */
	if (hdmirx_dev->cur_fmt_fourcc != V4L2_PIX_FMT_NV16)
		hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6, RB_SWAP_EN, RB_SWAP_EN);
	else
		hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6, RB_SWAP_EN, 0);

	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG7,
			   LOCK_FRAME_NUM_MASK,
			   LOCK_FRAME_NUM(2));
	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG1,
			   UV_WID_MASK | Y_WID_MASK | ABANDON_EN,
			   UV_WID(1) | Y_WID(2) | ABANDON_EN);
}

static void hdmirx_submodule_init(struct snps_hdmirx_dev *hdmirx_dev)
{
	/* Note: HDCP2_CONFIG must be configured, otherwise errors occur */
	hdmirx_update_bits(hdmirx_dev, HDCP2_CONFIG,
			   HDCP2_SWITCH_OVR_VALUE |
			   HDCP2_SWITCH_OVR_EN,
			   HDCP2_SWITCH_OVR_EN);
	hdmirx_scdc_init(hdmirx_dev);
	hdmirx_controller_init(hdmirx_dev);
}

static int hdmirx_enum_input(struct file *file, void *priv,
			     struct v4l2_input *input)
{
	if (input->index > 0)
		return -EINVAL;

	input->type = V4L2_INPUT_TYPE_CAMERA;
	input->std = 0;
	strscpy(input->name, "HDMI IN", sizeof(input->name));
	input->capabilities = V4L2_IN_CAP_DV_TIMINGS;

	return 0;
}

static int hdmirx_get_input(struct file *file, void *priv, unsigned int *i)
{
	*i = 0;
	return 0;
}

static int hdmirx_set_input(struct file *file, void *priv, unsigned int i)
{
	if (i)
		return -EINVAL;
	return 0;
}

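/*
 * Fill in the mplane format for the current DV timings. Bytesperline is
 * aligned to the 64-byte hardware alignment, and for single memory-plane
 * pixel formats the sizes of all color planes are accumulated into plane 0.
 */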
static void hdmirx_set_fmt(struct hdmirx_stream *stream,
			   struct v4l2_pix_format_mplane *pixm, bool try)
{
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	struct v4l2_bt_timings *bt = &hdmirx_dev->timings.bt;
	const struct v4l2_format_info *finfo;
	unsigned int imagesize = 0;
	unsigned int i;

	memset(&pixm->plane_fmt[0], 0, sizeof(struct v4l2_plane_pix_format));
	finfo = v4l2_format_info(pixm->pixelformat);
	if (!finfo) {
		finfo = v4l2_format_info(V4L2_PIX_FMT_BGR24);
		v4l2_dbg(1, debug, v4l2_dev,
			 "%s: set_fmt:%#x not supported, use def_fmt:%x\n",
			 __func__, pixm->pixelformat, finfo->format);
	}

	if (!bt->width || !bt->height)
		v4l2_dbg(1, debug, v4l2_dev, "%s: invalid resolution:%#xx%#x\n",
			 __func__, bt->width, bt->height);

	pixm->pixelformat = finfo->format;
	pixm->width = bt->width;
	pixm->height = bt->height;
	pixm->num_planes = finfo->mem_planes;
	pixm->quantization = V4L2_QUANTIZATION_DEFAULT;
	pixm->colorspace = V4L2_COLORSPACE_SRGB;
	pixm->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;

	if (bt->interlaced == V4L2_DV_INTERLACED)
		pixm->field = V4L2_FIELD_INTERLACED_TB;
	else
		pixm->field = V4L2_FIELD_NONE;

	memset(pixm->reserved, 0, sizeof(pixm->reserved));

	v4l2_fill_pixfmt_mp(pixm, finfo->format, pixm->width, pixm->height);

	for (i = 0; i < finfo->comp_planes; i++) {
		struct v4l2_plane_pix_format *plane_fmt;
		int width, height, bpl, size, bpp = 0;
		const unsigned int hw_align = 64;

		if (!i) {
			width = pixm->width;
			height = pixm->height;
		} else {
			width = pixm->width / finfo->hdiv;
			height = pixm->height / finfo->vdiv;
		}

		switch (finfo->format) {
		case V4L2_PIX_FMT_NV24:
		case V4L2_PIX_FMT_NV16:
		case V4L2_PIX_FMT_NV12:
		case V4L2_PIX_FMT_BGR24:
			bpp = finfo->bpp[i];
			break;
		default:
			v4l2_dbg(1, debug, v4l2_dev,
				 "fourcc: %#x is not supported\n",
				 finfo->format);
			break;
		}

		bpl = ALIGN(width * bpp, hw_align);
		size = bpl * height;
		imagesize += size;

		if (finfo->mem_planes > i) {
			/* Set bpl and size for each mplane */
			plane_fmt = pixm->plane_fmt + i;
			plane_fmt->bytesperline = bpl;
			plane_fmt->sizeimage = size;
		}

		v4l2_dbg(1, debug, v4l2_dev,
			 "C-Plane %u size: %d, Total imagesize: %d\n",
			 i, size, imagesize);
	}

	/* Convert to non-MPLANE format as we want to unify non-MPLANE and MPLANE */
	if (finfo->mem_planes == 1)
		pixm->plane_fmt[0].sizeimage = imagesize;

	if (!try) {
		stream->out_finfo = finfo;
		stream->pixm = *pixm;
		v4l2_dbg(1, debug, v4l2_dev,
			 "%s: req(%d, %d), out(%d, %d), fmt:%#x\n", __func__,
			 pixm->width, pixm->height, stream->pixm.width,
			 stream->pixm.height, finfo->format);
	}
}

static int hdmirx_enum_fmt_vid_cap_mplane(struct file *file, void *priv,
					  struct v4l2_fmtdesc *f)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;

	if (f->index >= 1)
		return -EINVAL;

	f->pixelformat = hdmirx_dev->cur_fmt_fourcc;

	return 0;
}

static int hdmirx_s_fmt_vid_cap_mplane(struct file *file,
				       void *priv, struct v4l2_format *f)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;

	if (vb2_is_busy(&stream->buf_queue)) {
		v4l2_err(v4l2_dev, "%s: queue busy\n", __func__);
		return -EBUSY;
	}

	hdmirx_set_fmt(stream, &f->fmt.pix_mp, false);

	return 0;
}

static int hdmirx_g_fmt_vid_cap_mplane(struct file *file, void *fh,
				       struct v4l2_format *f)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_pix_format_mplane pixm = {};

	pixm.pixelformat = hdmirx_dev->cur_fmt_fourcc;
	hdmirx_set_fmt(stream, &pixm, true);
	f->fmt.pix_mp = pixm;

	return 0;
}

static int hdmirx_g_dv_timings(struct file *file, void *_fh,
			       struct v4l2_dv_timings *timings)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	u32 dma_cfg1;

	*timings = hdmirx_dev->timings;
	dma_cfg1 = hdmirx_readl(hdmirx_dev, DMA_CONFIG1);
	v4l2_dbg(1, debug, v4l2_dev, "%s: pix_fmt: %s, DMA_CONFIG1:%#x\n",
		 __func__, pix_fmt_str[hdmirx_dev->pix_fmt], dma_cfg1);

	return 0;
}

static int hdmirx_s_dv_timings(struct file *file, void *_fh,
			       struct v4l2_dv_timings *timings)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;

	if (!timings)
		return -EINVAL;

	if (debug)
		v4l2_print_dv_timings(hdmirx_dev->v4l2_dev.name,
				      "s_dv_timings: ", timings, false);

	if (!v4l2_valid_dv_timings(timings, &hdmirx_timings_cap, NULL, NULL)) {
		v4l2_dbg(1, debug, v4l2_dev,
			 "%s: timings out of range\n", __func__);
		return -ERANGE;
	}

	/* Check if the timings are part of the CEA-861 timings. */
	v4l2_find_dv_timings_cap(timings, &hdmirx_timings_cap, 0, NULL, NULL);

	if (v4l2_match_dv_timings(&hdmirx_dev->timings, timings, 0, false)) {
		v4l2_dbg(1, debug, v4l2_dev, "%s: no change\n", __func__);
		return 0;
	}

	/*
	 * Changing the timings implies a format change, which is not allowed
	 * while buffers for use with streaming have already been allocated.
	 */
	if (vb2_is_busy(&stream->buf_queue))
		return -EBUSY;

	hdmirx_dev->timings = *timings;
	/* Update the internal format */
	hdmirx_set_fmt(stream, &stream->pixm, false);

	return 0;
}

static int hdmirx_querycap(struct file *file, void *priv,
			   struct v4l2_capability *cap)
{
	struct hdmirx_stream *stream = video_drvdata(file);
	struct device *dev = stream->hdmirx_dev->dev;

	strscpy(cap->driver, dev->driver->name, sizeof(cap->driver));
	strscpy(cap->card, dev->driver->name, sizeof(cap->card));

	return 0;
}

static int hdmirx_queue_setup(struct vb2_queue *queue,
			      unsigned int *num_buffers,
			      unsigned int *num_planes,
			      unsigned int sizes[],
			      struct device *alloc_ctxs[])
{
	struct hdmirx_stream *stream = vb2_get_drv_priv(queue);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	const struct v4l2_pix_format_mplane *pixm = NULL;
	const struct v4l2_format_info *out_finfo;
	u32 i;

	pixm = &stream->pixm;
	out_finfo = stream->out_finfo;

	if (!out_finfo) {
		v4l2_err(v4l2_dev, "%s: out_fmt not set\n", __func__);
		return -EINVAL;
	}

	if (*num_planes) {
		if (*num_planes != pixm->num_planes)
			return -EINVAL;

		for (i = 0; i < *num_planes; i++)
			if (sizes[i] < pixm->plane_fmt[i].sizeimage)
				return -EINVAL;
		return 0;
	}

	*num_planes = out_finfo->mem_planes;

	for (i = 0; i < out_finfo->mem_planes; i++)
		sizes[i] = pixm->plane_fmt[i].sizeimage;

	v4l2_dbg(1, debug, v4l2_dev, "%s: count %d, size %d\n",
		 v4l2_type_names[queue->type], *num_buffers, sizes[0]);

	return 0;
}

/*
 * The vb2_buffer is stored in a hdmirx_buffer in order to unify
 * mplane and non-mplane buffers.
 */
static void hdmirx_buf_queue(struct vb2_buffer *vb)
{
	const struct v4l2_pix_format_mplane *pixm;
	const struct v4l2_format_info *out_finfo;
	struct hdmirx_buffer *hdmirx_buf;
	struct vb2_v4l2_buffer *vbuf;
	struct hdmirx_stream *stream;
	struct vb2_queue *queue;
	unsigned long flags;
	unsigned int i;

	vbuf = to_vb2_v4l2_buffer(vb);
	hdmirx_buf = container_of(vbuf, struct hdmirx_buffer, vb);
	queue = vb->vb2_queue;
	stream = vb2_get_drv_priv(queue);
	pixm = &stream->pixm;
	out_finfo = stream->out_finfo;

	memset(hdmirx_buf->buff_addr, 0, sizeof(hdmirx_buf->buff_addr));

	/*
	 * If mplanes > 1, every c-plane has its own m-plane;
	 * otherwise, multiple c-planes share the same m-plane.
	 */
	for (i = 0; i < out_finfo->mem_planes; i++)
		hdmirx_buf->buff_addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);

	if (out_finfo->mem_planes == 1) {
		if (out_finfo->comp_planes == 1) {
			hdmirx_buf->buff_addr[HDMIRX_PLANE_CBCR] =
				hdmirx_buf->buff_addr[HDMIRX_PLANE_Y];
		} else {
			for (i = 0; i < out_finfo->comp_planes - 1; i++)
				hdmirx_buf->buff_addr[i + 1] =
					hdmirx_buf->buff_addr[i] +
					pixm->plane_fmt[i].bytesperline *
					pixm->height;
		}
	}

	spin_lock_irqsave(&stream->vbq_lock, flags);
	list_add_tail(&hdmirx_buf->queue, &stream->buf_head);
	spin_unlock_irqrestore(&stream->vbq_lock, flags);
}

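/*
 * Give back every buffer the driver currently owns (current, next and all
 * queued buffers) to vb2 in the given state, e.g. VB2_BUF_STATE_ERROR when
 * streaming is stopped.
 */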
static void return_all_buffers(struct hdmirx_stream *stream,
			       enum vb2_buffer_state state)
{
	struct hdmirx_buffer *buf, *tmp;
	unsigned long flags;

	spin_lock_irqsave(&stream->vbq_lock, flags);
	if (stream->curr_buf)
		list_add_tail(&stream->curr_buf->queue, &stream->buf_head);
	if (stream->next_buf && stream->next_buf != stream->curr_buf)
		list_add_tail(&stream->next_buf->queue, &stream->buf_head);
	stream->curr_buf = NULL;
	stream->next_buf = NULL;

	list_for_each_entry_safe(buf, tmp, &stream->buf_head, queue) {
		list_del(&buf->queue);
		vb2_buffer_done(&buf->vb.vb2_buf, state);
	}
	spin_unlock_irqrestore(&stream->vbq_lock, flags);
}

static void hdmirx_stop_streaming(struct vb2_queue *queue)
{
	struct hdmirx_stream *stream = vb2_get_drv_priv(queue);
	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
	int ret;

	v4l2_dbg(1, debug, v4l2_dev, "stream start stopping\n");
	mutex_lock(&hdmirx_dev->stream_lock);
	WRITE_ONCE(stream->stopping, true);

	/* wait for the last irq to return the buffer */
	ret = wait_event_timeout(stream->wq_stopped, !stream->stopping,
				 msecs_to_jiffies(500));
	if (!ret)
		v4l2_dbg(1, debug, v4l2_dev, "%s: timeout waiting last irq\n",
			 __func__);

	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6, HDMIRX_DMA_EN, 0);
	return_all_buffers(stream, VB2_BUF_STATE_ERROR);
	mutex_unlock(&hdmirx_dev->stream_lock);
	v4l2_dbg(1, debug, v4l2_dev, "stream stopping finished\n");
}

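/*
 * Start capture: take the first buffer from the queue, program its Y/CbCr
 * addresses, set the line-flag interrupt to fire mid-frame (a quarter of
 * the height for interlaced content) and enable the DMA and its interrupts.
 */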
hdmirx_start_streaming(struct vb2_queue * queue,unsigned int count)1532 static int hdmirx_start_streaming(struct vb2_queue *queue, unsigned int count)
1533 {
1534 	struct hdmirx_stream *stream = vb2_get_drv_priv(queue);
1535 	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
1536 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1537 	struct v4l2_dv_timings timings = hdmirx_dev->timings;
1538 	struct v4l2_bt_timings *bt = &timings.bt;
1539 	unsigned long lock_flags = 0;
1540 	int line_flag;
1541 
1542 	mutex_lock(&hdmirx_dev->stream_lock);
1543 	stream->frame_idx = 0;
1544 	stream->line_flag_int_cnt = 0;
1545 	stream->curr_buf = NULL;
1546 	stream->next_buf = NULL;
1547 	stream->irq_stat = 0;
1548 
1549 	WRITE_ONCE(stream->stopping, false);
1550 
1551 	spin_lock_irqsave(&stream->vbq_lock, lock_flags);
1552 	if (!stream->curr_buf) {
1553 		if (!list_empty(&stream->buf_head)) {
1554 			stream->curr_buf = list_first_entry(&stream->buf_head,
1555 							    struct hdmirx_buffer,
1556 							    queue);
1557 			list_del(&stream->curr_buf->queue);
1558 		} else {
1559 			stream->curr_buf = NULL;
1560 		}
1561 	}
1562 	spin_unlock_irqrestore(&stream->vbq_lock, lock_flags);
1563 
1564 	if (!stream->curr_buf) {
1565 		mutex_unlock(&hdmirx_dev->stream_lock);
1566 		return -ENOMEM;
1567 	}
1568 
1569 	v4l2_dbg(2, debug, v4l2_dev,
1570 		 "%s: start_stream cur_buf y_addr:%#x, uv_addr:%#x\n",
1571 		 __func__, stream->curr_buf->buff_addr[HDMIRX_PLANE_Y],
1572 		 stream->curr_buf->buff_addr[HDMIRX_PLANE_CBCR]);
1573 	hdmirx_writel(hdmirx_dev, DMA_CONFIG2,
1574 		      stream->curr_buf->buff_addr[HDMIRX_PLANE_Y]);
1575 	hdmirx_writel(hdmirx_dev, DMA_CONFIG3,
1576 		      stream->curr_buf->buff_addr[HDMIRX_PLANE_CBCR]);
1577 
1578 	if (bt->height) {
1579 		if (bt->interlaced == V4L2_DV_INTERLACED)
1580 			line_flag = bt->height / 4;
1581 		else
1582 			line_flag = bt->height / 2;
1583 		hdmirx_update_bits(hdmirx_dev, DMA_CONFIG7,
1584 				   LINE_FLAG_NUM_MASK,
1585 				   LINE_FLAG_NUM(line_flag));
1586 	} else {
1587 		v4l2_err(v4l2_dev, "invalid BT timing height=%d\n", bt->height);
1588 	}
1589 
1590 	hdmirx_writel(hdmirx_dev, DMA_CONFIG5, 0xffffffff);
1591 	hdmirx_writel(hdmirx_dev, CED_DYN_CONTROL, 0x1);
1592 	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG4,
1593 			   LINE_FLAG_INT_EN |
1594 			   HDMIRX_DMA_IDLE_INT |
1595 			   HDMIRX_LOCK_DISABLE_INT |
1596 			   LAST_FRAME_AXI_UNFINISH_INT_EN |
1597 			   FIFO_OVERFLOW_INT_EN |
1598 			   FIFO_UNDERFLOW_INT_EN |
1599 			   HDMIRX_AXI_ERROR_INT_EN,
1600 			   LINE_FLAG_INT_EN |
1601 			   HDMIRX_DMA_IDLE_INT |
1602 			   HDMIRX_LOCK_DISABLE_INT |
1603 			   LAST_FRAME_AXI_UNFINISH_INT_EN |
1604 			   FIFO_OVERFLOW_INT_EN |
1605 			   FIFO_UNDERFLOW_INT_EN |
1606 			   HDMIRX_AXI_ERROR_INT_EN);
1607 	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6, HDMIRX_DMA_EN, HDMIRX_DMA_EN);
1608 	v4l2_dbg(1, debug, v4l2_dev, "%s: enable dma", __func__);
1609 	mutex_unlock(&hdmirx_dev->stream_lock);
1610 
1611 	return 0;
1612 }
1613 
1614 /* vb2 queue */
1615 static const struct vb2_ops hdmirx_vb2_ops = {
1616 	.queue_setup = hdmirx_queue_setup,
1617 	.buf_queue = hdmirx_buf_queue,
1618 	.stop_streaming = hdmirx_stop_streaming,
1619 	.start_streaming = hdmirx_start_streaming,
1620 };
1621 
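/*
 * The vb2 queue uses contiguous DMA buffers (MMAP/DMABUF), is serialized
 * by the stream vlock and needs at least one buffer queued before
 * streaming can start.
 */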
1622 static int hdmirx_init_vb2_queue(struct vb2_queue *q,
1623 				 struct hdmirx_stream *stream,
1624 				 enum v4l2_buf_type buf_type)
1625 {
1626 	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
1627 
1628 	q->type = buf_type;
1629 	q->io_modes = VB2_MMAP | VB2_DMABUF;
1630 	q->drv_priv = stream;
1631 	q->ops = &hdmirx_vb2_ops;
1632 	q->mem_ops = &vb2_dma_contig_memops;
1633 	q->buf_struct_size = sizeof(struct hdmirx_buffer);
1634 	q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;
1635 	q->lock = &stream->vlock;
1636 	q->dev = hdmirx_dev->dev;
1637 	q->min_queued_buffers = 1;
1638 
1639 	return vb2_queue_init(q);
1640 }
1641 
1642 /* video device */
1643 static const struct v4l2_ioctl_ops hdmirx_v4l2_ioctl_ops = {
1644 	.vidioc_querycap = hdmirx_querycap,
1645 	.vidioc_try_fmt_vid_cap_mplane = hdmirx_g_fmt_vid_cap_mplane,
1646 	.vidioc_s_fmt_vid_cap_mplane = hdmirx_s_fmt_vid_cap_mplane,
1647 	.vidioc_g_fmt_vid_cap_mplane = hdmirx_g_fmt_vid_cap_mplane,
1648 	.vidioc_enum_fmt_vid_cap = hdmirx_enum_fmt_vid_cap_mplane,
1649 
1650 	.vidioc_s_dv_timings = hdmirx_s_dv_timings,
1651 	.vidioc_g_dv_timings = hdmirx_g_dv_timings,
1652 	.vidioc_enum_dv_timings = hdmirx_enum_dv_timings,
1653 	.vidioc_query_dv_timings = hdmirx_query_dv_timings,
1654 	.vidioc_dv_timings_cap = hdmirx_dv_timings_cap,
1655 	.vidioc_enum_input = hdmirx_enum_input,
1656 	.vidioc_g_input = hdmirx_get_input,
1657 	.vidioc_s_input = hdmirx_set_input,
1658 	.vidioc_g_edid = hdmirx_get_edid,
1659 	.vidioc_s_edid = hdmirx_set_edid,
1660 	.vidioc_g_parm = hdmirx_g_parm,
1661 
1662 	.vidioc_reqbufs = vb2_ioctl_reqbufs,
1663 	.vidioc_querybuf = vb2_ioctl_querybuf,
1664 	.vidioc_create_bufs = vb2_ioctl_create_bufs,
1665 	.vidioc_qbuf = vb2_ioctl_qbuf,
1666 	.vidioc_expbuf = vb2_ioctl_expbuf,
1667 	.vidioc_dqbuf = vb2_ioctl_dqbuf,
1668 	.vidioc_prepare_buf = vb2_ioctl_prepare_buf,
1669 	.vidioc_streamon = vb2_ioctl_streamon,
1670 	.vidioc_streamoff = vb2_ioctl_streamoff,
1671 
1672 	.vidioc_log_status = v4l2_ctrl_log_status,
1673 	.vidioc_subscribe_event = hdmirx_subscribe_event,
1674 	.vidioc_unsubscribe_event = v4l2_event_unsubscribe,
1675 };
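
/*
 * The ioctl set above follows the standard V4L2 capture model; as a
 * rough sketch (assuming ordinary V4L2 usage, not taken from this
 * driver), userspace would typically issue VIDIOC_QUERY_DV_TIMINGS and
 * VIDIOC_S_DV_TIMINGS, negotiate the format with VIDIOC_G/S_FMT, then
 * run the usual VIDIOC_REQBUFS -> VIDIOC_QBUF -> VIDIOC_STREAMON ->
 * VIDIOC_DQBUF cycle.
 */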
1676 
1677 static const struct v4l2_file_operations hdmirx_fops = {
1678 	.owner = THIS_MODULE,
1679 	.open = v4l2_fh_open,
1680 	.release = vb2_fop_release,
1681 	.unlocked_ioctl = video_ioctl2,
1682 	.poll = vb2_fop_poll,
1683 	.mmap = vb2_fop_mmap,
1684 };
1685 
1686 static int hdmirx_register_stream_vdev(struct hdmirx_stream *stream)
1687 {
1688 	struct snps_hdmirx_dev *hdmirx_dev = stream->hdmirx_dev;
1689 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1690 	struct video_device *vdev = &stream->vdev;
1691 	int ret;
1692 
1693 	strscpy(vdev->name, "stream_hdmirx", sizeof(vdev->name));
1694 	INIT_LIST_HEAD(&stream->buf_head);
1695 	spin_lock_init(&stream->vbq_lock);
1696 	mutex_init(&stream->vlock);
1697 	init_waitqueue_head(&stream->wq_stopped);
1698 	stream->curr_buf = NULL;
1699 	stream->next_buf = NULL;
1700 
1701 	vdev->ioctl_ops = &hdmirx_v4l2_ioctl_ops;
1702 	vdev->release = video_device_release_empty;
1703 	vdev->fops = &hdmirx_fops;
1704 	vdev->minor = -1;
1705 	vdev->v4l2_dev = v4l2_dev;
1706 	vdev->lock = &stream->vlock;
1707 	vdev->device_caps = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
1708 			    V4L2_CAP_STREAMING;
1709 	vdev->vfl_dir = VFL_DIR_RX;
1710 
1711 	video_set_drvdata(vdev, stream);
1712 
1713 	hdmirx_init_vb2_queue(&stream->buf_queue, stream,
1714 			      V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
1715 	vdev->queue = &stream->buf_queue;
1716 
1717 	ret = video_register_device(vdev, VFL_TYPE_VIDEO, -1);
1718 	if (ret < 0) {
1719 		v4l2_err(v4l2_dev, "video_register_device failed: %d\n", ret);
1720 		return ret;
1721 	}
1722 
1723 	return 0;
1724 }
1725 
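/*
 * Called on any indication that the input signal has changed: stop the
 * capture DMA, mask its interrupts, reset the DMA and let the delayed
 * resolution-change work re-evaluate the signal ~50ms later.
 */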
1726 static void process_signal_change(struct snps_hdmirx_dev *hdmirx_dev)
1727 {
1728 	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG6, HDMIRX_DMA_EN, 0);
1729 	hdmirx_update_bits(hdmirx_dev, DMA_CONFIG4,
1730 			   LINE_FLAG_INT_EN |
1731 			   HDMIRX_DMA_IDLE_INT |
1732 			   HDMIRX_LOCK_DISABLE_INT |
1733 			   LAST_FRAME_AXI_UNFINISH_INT_EN |
1734 			   FIFO_OVERFLOW_INT_EN |
1735 			   FIFO_UNDERFLOW_INT_EN |
1736 			   HDMIRX_AXI_ERROR_INT_EN, 0);
1737 	hdmirx_reset_dma(hdmirx_dev);
1738 	queue_delayed_work(system_unbound_wq,
1739 			   &hdmirx_dev->delayed_work_res_change,
1740 			   msecs_to_jiffies(50));
1741 }
1742 
1743 static void avpunit_0_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1744 				  int status, bool *handled)
1745 {
1746 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1747 
1748 	if (status & (CED_DYN_CNT_CH2_IRQ |
1749 		      CED_DYN_CNT_CH1_IRQ |
1750 		      CED_DYN_CNT_CH0_IRQ)) {
1751 		process_signal_change(hdmirx_dev);
1752 		v4l2_dbg(2, debug, v4l2_dev, "%s: avp0_st:%#x\n",
1753 			 __func__, status);
1754 		*handled = true;
1755 	}
1756 
1757 	hdmirx_clear_interrupt(hdmirx_dev, AVPUNIT_0_INT_CLEAR, 0xffffffff);
1758 	hdmirx_writel(hdmirx_dev, AVPUNIT_0_INT_FORCE, 0x0);
1759 }
1760 
1761 static void avpunit_1_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1762 				  int status, bool *handled)
1763 {
1764 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1765 
1766 	if (status & DEFRAMER_VSYNC_THR_REACHED_IRQ) {
1767 		v4l2_dbg(2, debug, v4l2_dev,
1768 			 "Vertical Sync threshold reached interrupt %#x", status);
1769 		hdmirx_update_bits(hdmirx_dev, AVPUNIT_1_INT_MASK_N,
1770 				   DEFRAMER_VSYNC_THR_REACHED_MASK_N, 0);
1771 		*handled = true;
1772 	}
1773 }
1774 
1775 static void mainunit_0_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1776 				   int status, bool *handled)
1777 {
1778 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1779 
1780 	v4l2_dbg(2, debug, v4l2_dev, "mu0_st:%#x\n", status);
1781 	if (status & TIMER_BASE_LOCKED_IRQ) {
1782 		hdmirx_update_bits(hdmirx_dev, MAINUNIT_0_INT_MASK_N,
1783 				   TIMER_BASE_LOCKED_IRQ, 0);
1784 		complete(&hdmirx_dev->timer_base_lock);
1785 		*handled = true;
1786 	}
1787 
1788 	if (status & TMDSQPCLK_OFF_CHG) {
1789 		process_signal_change(hdmirx_dev);
1790 		v4l2_dbg(2, debug, v4l2_dev, "%s: TMDSQPCLK_OFF_CHG\n", __func__);
1791 		*handled = true;
1792 	}
1793 
1794 	if (status & TMDSQPCLK_LOCKED_CHG) {
1795 		process_signal_change(hdmirx_dev);
1796 		v4l2_dbg(2, debug, v4l2_dev, "%s: TMDSQPCLK_LOCKED_CHG\n", __func__);
1797 		*handled = true;
1798 	}
1799 
1800 	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_0_INT_CLEAR, 0xffffffff);
1801 	hdmirx_writel(hdmirx_dev, MAINUNIT_0_INT_FORCE, 0x0);
1802 }
1803 
1804 static void mainunit_2_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1805 				   int status, bool *handled)
1806 {
1807 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1808 
1809 	v4l2_dbg(2, debug, v4l2_dev, "mu2_st:%#x\n", status);
1810 	if (status & PHYCREG_CR_WRITE_DONE) {
1811 		hdmirx_update_bits(hdmirx_dev, MAINUNIT_2_INT_MASK_N,
1812 				   PHYCREG_CR_WRITE_DONE, 0);
1813 		complete(&hdmirx_dev->cr_write_done);
1814 		*handled = true;
1815 	}
1816 
1817 	if (status & TMDSVALID_STABLE_CHG) {
1818 		process_signal_change(hdmirx_dev);
1819 		v4l2_dbg(2, debug, v4l2_dev, "%s: TMDSVALID_STABLE_CHG\n", __func__);
1820 		*handled = true;
1821 	}
1822 
1823 	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_2_INT_CLEAR, 0xffffffff);
1824 	hdmirx_writel(hdmirx_dev, MAINUNIT_2_INT_FORCE, 0x0);
1825 }
1826 
1827 static void pkt_2_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1828 			      int status, bool *handled)
1829 {
1830 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1831 
1832 	v4l2_dbg(2, debug, v4l2_dev, "%s: pk2_st:%#x\n", __func__, status);
1833 	if (status & PKTDEC_AVIIF_RCV_IRQ) {
1834 		hdmirx_update_bits(hdmirx_dev, PKT_2_INT_MASK_N,
1835 				   PKTDEC_AVIIF_RCV_IRQ, 0);
1836 		complete(&hdmirx_dev->avi_pkt_rcv);
1837 		v4l2_dbg(2, debug, v4l2_dev, "%s: AVIIF_RCV_IRQ\n", __func__);
1838 		*handled = true;
1839 	}
1840 
1841 	hdmirx_clear_interrupt(hdmirx_dev, PKT_2_INT_CLEAR, 0xffffffff);
1842 }
1843 
1844 static void scdc_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1845 			     int status, bool *handled)
1846 {
1847 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1848 
1849 	v4l2_dbg(2, debug, v4l2_dev, "%s: scdc_st:%#x\n", __func__, status);
1850 	if (status & SCDCTMDSCCFG_CHG) {
1851 		hdmirx_tmds_clk_ratio_config(hdmirx_dev);
1852 		*handled = true;
1853 	}
1854 
1855 	hdmirx_clear_interrupt(hdmirx_dev, SCDC_INT_CLEAR, 0xffffffff);
1856 }
1857 
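/*
 * Top-level HDMI controller interrupt: read every status bank, mask it
 * with the corresponding enable register and dispatch to the per-bank
 * handlers above.
 */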
1858 static irqreturn_t hdmirx_hdmi_irq_handler(int irq, void *dev_id)
1859 {
1860 	struct snps_hdmirx_dev *hdmirx_dev = dev_id;
1861 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1862 	u32 mu0_st, mu2_st, pk2_st, scdc_st, avp1_st, avp0_st;
1863 	u32 mu0_mask, mu2_mask, pk2_mask, scdc_mask, avp1_msk, avp0_msk;
1864 	bool handled = false;
1865 
1866 	mu0_mask = hdmirx_readl(hdmirx_dev, MAINUNIT_0_INT_MASK_N);
1867 	mu2_mask = hdmirx_readl(hdmirx_dev, MAINUNIT_2_INT_MASK_N);
1868 	pk2_mask = hdmirx_readl(hdmirx_dev, PKT_2_INT_MASK_N);
1869 	scdc_mask = hdmirx_readl(hdmirx_dev, SCDC_INT_MASK_N);
1870 	mu0_st = hdmirx_readl(hdmirx_dev, MAINUNIT_0_INT_STATUS);
1871 	mu2_st = hdmirx_readl(hdmirx_dev, MAINUNIT_2_INT_STATUS);
1872 	pk2_st = hdmirx_readl(hdmirx_dev, PKT_2_INT_STATUS);
1873 	scdc_st = hdmirx_readl(hdmirx_dev, SCDC_INT_STATUS);
1874 	avp0_st = hdmirx_readl(hdmirx_dev, AVPUNIT_0_INT_STATUS);
1875 	avp1_st = hdmirx_readl(hdmirx_dev, AVPUNIT_1_INT_STATUS);
1876 	avp0_msk = hdmirx_readl(hdmirx_dev, AVPUNIT_0_INT_MASK_N);
1877 	avp1_msk = hdmirx_readl(hdmirx_dev, AVPUNIT_1_INT_MASK_N);
1878 	mu0_st &= mu0_mask;
1879 	mu2_st &= mu2_mask;
1880 	pk2_st &= pk2_mask;
1881 	avp1_st &= avp1_msk;
1882 	avp0_st &= avp0_msk;
1883 	scdc_st &= scdc_mask;
1884 
1885 	if (avp0_st)
1886 		avpunit_0_int_handler(hdmirx_dev, avp0_st, &handled);
1887 	if (avp1_st)
1888 		avpunit_1_int_handler(hdmirx_dev, avp1_st, &handled);
1889 	if (mu0_st)
1890 		mainunit_0_int_handler(hdmirx_dev, mu0_st, &handled);
1891 	if (mu2_st)
1892 		mainunit_2_int_handler(hdmirx_dev, mu2_st, &handled);
1893 	if (pk2_st)
1894 		pkt_2_int_handler(hdmirx_dev, pk2_st, &handled);
1895 	if (scdc_st)
1896 		scdc_int_handler(hdmirx_dev, scdc_st, &handled);
1897 
1898 	if (!handled) {
1899 		v4l2_dbg(2, debug, v4l2_dev, "%s: hdmi irq not handled", __func__);
1900 		v4l2_dbg(2, debug, v4l2_dev,
1901 			 "avp0:%#x, avp1:%#x, mu0:%#x, mu2:%#x, pk2:%#x, scdc:%#x\n",
1902 			 avp0_st, avp1_st, mu0_st, mu2_st, pk2_st, scdc_st);
1903 	}
1904 
1905 	v4l2_dbg(2, debug, v4l2_dev, "%s: en_fiq", __func__);
1906 
1907 	return handled ? IRQ_HANDLED : IRQ_NONE;
1908 }
1909 
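/*
 * Complete a filled buffer: set the payload of each memory plane to the
 * negotiated image size, timestamp it and hand it back to vb2 as DONE.
 */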
1910 static void hdmirx_vb_done(struct hdmirx_stream *stream,
1911 			   struct vb2_v4l2_buffer *vb_done)
1912 {
1913 	const struct v4l2_format_info *finfo = stream->out_finfo;
1914 	u32 i;
1915 
1916 	/* Dequeue a filled buffer */
1917 	for (i = 0; i < finfo->mem_planes; i++) {
1918 		vb2_set_plane_payload(&vb_done->vb2_buf, i,
1919 				      stream->pixm.plane_fmt[i].sizeimage);
1920 	}
1921 
1922 	vb_done->vb2_buf.timestamp = ktime_get_ns();
1923 	vb2_buffer_done(&vb_done->vb2_buf, VB2_BUF_STATE_DONE);
1924 }
1925 
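/*
 * DMA idle interrupt: the frame written into curr_buf is complete. After
 * the initial FILTER_FRAME_CNT frames are dropped, curr_buf is returned
 * to userspace (for interlaced input only on every second field) and
 * next_buf, pre-programmed by the line-flag handler, becomes curr_buf.
 */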
1926 static void dma_idle_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1927 				 bool *handled)
1928 {
1929 	struct hdmirx_stream *stream = &hdmirx_dev->stream;
1930 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1931 	struct v4l2_dv_timings timings = hdmirx_dev->timings;
1932 	struct v4l2_bt_timings *bt = &timings.bt;
1933 	struct vb2_v4l2_buffer *vb_done = NULL;
1934 
1935 	if (!(stream->irq_stat) && !(stream->irq_stat & LINE_FLAG_INT_EN))
1936 		v4l2_dbg(1, debug, v4l2_dev,
1937 			 "%s: last time have no line_flag_irq\n", __func__);
1938 
1939 	/* skip first frames that are expected to come out zeroed from DMA */
1940 	if (stream->line_flag_int_cnt <= FILTER_FRAME_CNT)
1941 		goto DMA_IDLE_OUT;
1942 
1943 	if (bt->interlaced != V4L2_DV_INTERLACED ||
1944 	    !(stream->line_flag_int_cnt % 2)) {
1945 		if (stream->next_buf) {
1946 			if (stream->curr_buf)
1947 				vb_done = &stream->curr_buf->vb;
1948 
1949 			if (vb_done) {
1950 				vb_done->vb2_buf.timestamp = ktime_get_ns();
1951 				vb_done->sequence = stream->frame_idx;
1952 
1953 				if (bt->interlaced)
1954 					vb_done->field = V4L2_FIELD_INTERLACED_TB;
1955 				else
1956 					vb_done->field = V4L2_FIELD_NONE;
1957 
1958 				hdmirx_vb_done(stream, vb_done);
1959 				stream->frame_idx++;
1960 				if (stream->frame_idx == 30)
1961 					v4l2_dbg(1, debug, v4l2_dev,
1962 						 "rcv frames\n");
1963 			}
1964 
1965 			stream->curr_buf = NULL;
1966 			if (stream->next_buf) {
1967 				stream->curr_buf = stream->next_buf;
1968 				stream->next_buf = NULL;
1969 			}
1970 		} else {
1971 			v4l2_dbg(3, debug, v4l2_dev,
1972 				 "%s: next_buf NULL, skip vb_done\n", __func__);
1973 		}
1974 	}
1975 
1976 DMA_IDLE_OUT:
1977 	*handled = true;
1978 }
1979 
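/*
 * Mid-frame line-flag interrupt: if no buffer is staged yet, take the
 * next buffer from the queue and latch its plane addresses into
 * DMA_CONFIG2/3 so the hardware switches to it for the following frame.
 */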
1980 static void line_flag_int_handler(struct snps_hdmirx_dev *hdmirx_dev,
1981 				  bool *handled)
1982 {
1983 	struct hdmirx_stream *stream = &hdmirx_dev->stream;
1984 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
1985 	struct v4l2_dv_timings timings = hdmirx_dev->timings;
1986 	struct v4l2_bt_timings *bt = &timings.bt;
1987 	u32 dma_cfg6;
1988 
1989 	stream->line_flag_int_cnt++;
1990 	if (!(stream->irq_stat) && !(stream->irq_stat & HDMIRX_DMA_IDLE_INT))
1991 		v4l2_dbg(1, debug, v4l2_dev,
1992 			 "%s: last have no dma_idle_irq\n", __func__);
1993 	dma_cfg6 = hdmirx_readl(hdmirx_dev, DMA_CONFIG6);
1994 	if (!(dma_cfg6 & HDMIRX_DMA_EN)) {
1995 		v4l2_dbg(2, debug, v4l2_dev, "%s: dma not on\n", __func__);
1996 		goto LINE_FLAG_OUT;
1997 	}
1998 
1999 	if (stream->line_flag_int_cnt <= FILTER_FRAME_CNT)
2000 		goto LINE_FLAG_OUT;
2001 
2002 	if (bt->interlaced != V4L2_DV_INTERLACED ||
2003 	    !(stream->line_flag_int_cnt % 2)) {
2004 		if (!stream->next_buf) {
2005 			spin_lock(&stream->vbq_lock);
2006 			if (!list_empty(&stream->buf_head)) {
2007 				stream->next_buf = list_first_entry(&stream->buf_head,
2008 								    struct hdmirx_buffer,
2009 								    queue);
2010 				list_del(&stream->next_buf->queue);
2011 			} else {
2012 				stream->next_buf = NULL;
2013 			}
2014 			spin_unlock(&stream->vbq_lock);
2015 
2016 			if (stream->next_buf) {
2017 				hdmirx_writel(hdmirx_dev, DMA_CONFIG2,
2018 					      stream->next_buf->buff_addr[HDMIRX_PLANE_Y]);
2019 				hdmirx_writel(hdmirx_dev, DMA_CONFIG3,
2020 					      stream->next_buf->buff_addr[HDMIRX_PLANE_CBCR]);
2021 			} else {
2022 				v4l2_dbg(3, debug, v4l2_dev,
2023 					 "%s: no buffer is available\n", __func__);
2024 			}
2025 		}
2026 	} else {
2027 		v4l2_dbg(3, debug, v4l2_dev, "%s: interlace:%d, line_flag_int_cnt:%d\n",
2028 			 __func__, bt->interlaced, stream->line_flag_int_cnt);
2029 	}
2030 
2031 LINE_FLAG_OUT:
2032 	*handled = true;
2033 }
2034 
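/*
 * Threaded DMA interrupt: when a stop is pending it masks the DMA
 * interrupts and wakes the waiter in hdmirx_stop_streaming(); otherwise
 * it dispatches the idle and line-flag events and records the status for
 * the next iteration.
 */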
2035 static irqreturn_t hdmirx_dma_irq_handler(int irq, void *dev_id)
2036 {
2037 	struct snps_hdmirx_dev *hdmirx_dev = dev_id;
2038 	struct hdmirx_stream *stream = &hdmirx_dev->stream;
2039 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
2040 	u32 dma_stat1, dma_stat13;
2041 	bool handled = false;
2042 
2043 	dma_stat1 = hdmirx_readl(hdmirx_dev, DMA_STATUS1);
2044 	dma_stat13 = hdmirx_readl(hdmirx_dev, DMA_STATUS13);
2045 	v4l2_dbg(3, debug, v4l2_dev, "dma_irq st1:%#x, st13:%d\n",
2046 		 dma_stat1, dma_stat13);
2047 
2048 	if (READ_ONCE(stream->stopping)) {
2049 		v4l2_dbg(1, debug, v4l2_dev, "%s: stop stream\n", __func__);
2050 		hdmirx_writel(hdmirx_dev, DMA_CONFIG5, 0xffffffff);
2051 		hdmirx_update_bits(hdmirx_dev, DMA_CONFIG4,
2052 				   LINE_FLAG_INT_EN |
2053 				   HDMIRX_DMA_IDLE_INT |
2054 				   HDMIRX_LOCK_DISABLE_INT |
2055 				   LAST_FRAME_AXI_UNFINISH_INT_EN |
2056 				   FIFO_OVERFLOW_INT_EN |
2057 				   FIFO_UNDERFLOW_INT_EN |
2058 				   HDMIRX_AXI_ERROR_INT_EN, 0);
2059 		WRITE_ONCE(stream->stopping, false);
2060 		wake_up(&stream->wq_stopped);
2061 		return IRQ_HANDLED;
2062 	}
2063 
2064 	if (dma_stat1 & HDMIRX_DMA_IDLE_INT)
2065 		dma_idle_int_handler(hdmirx_dev, &handled);
2066 
2067 	if (dma_stat1 & LINE_FLAG_INT_EN)
2068 		line_flag_int_handler(hdmirx_dev, &handled);
2069 
2070 	if (!handled)
2071 		v4l2_dbg(3, debug, v4l2_dev,
2072 			 "%s: dma irq not handled, dma_stat1:%#x\n",
2073 			 __func__, dma_stat1);
2074 
2075 	stream->irq_stat = dma_stat1;
2076 	hdmirx_writel(hdmirx_dev, DMA_CONFIG5, 0xffffffff);
2077 
2078 	return IRQ_HANDLED;
2079 }
2080 
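/*
 * Poll (up to 300 iterations) for a stable TMDS signal, DMA lock and
 * TMDS QP clock lock, bail out if the cable is pulled, then wait for an
 * AVI InfoFrame before propagating the detected format.
 */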
2081 static int hdmirx_wait_signal_lock(struct snps_hdmirx_dev *hdmirx_dev)
2082 {
2083 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
2084 	u32 mu_status, scdc_status, dma_st10, cmu_st;
2085 	u32 i;
2086 
2087 	for (i = 0; i < 300; i++) {
2088 		mu_status = hdmirx_readl(hdmirx_dev, MAINUNIT_STATUS);
2089 		scdc_status = hdmirx_readl(hdmirx_dev, SCDC_REGBANK_STATUS3);
2090 		dma_st10 = hdmirx_readl(hdmirx_dev, DMA_STATUS10);
2091 		cmu_st = hdmirx_readl(hdmirx_dev, CMU_STATUS);
2092 
2093 		if ((mu_status & TMDSVALID_STABLE_ST) &&
2094 		    (dma_st10 & HDMIRX_LOCK) &&
2095 		    (cmu_st & TMDSQPCLK_LOCKED_ST))
2096 			break;
2097 
2098 		if (!tx_5v_power_present(hdmirx_dev)) {
2099 			v4l2_dbg(1, debug, v4l2_dev,
2100 				 "%s: HDMI pull out, return\n", __func__);
2101 			return -1;
2102 		}
2103 
2104 		hdmirx_tmds_clk_ratio_config(hdmirx_dev);
2105 	}
2106 
2107 	if (i == 300) {
2108 		v4l2_err(v4l2_dev, "%s: signal not lock, tmds_clk_ratio:%d\n",
2109 			 __func__, hdmirx_dev->tmds_clk_ratio);
2110 		v4l2_err(v4l2_dev, "%s: mu_st:%#x, scdc_st:%#x, dma_st10:%#x\n",
2111 			 __func__, mu_status, scdc_status, dma_st10);
2112 		return -1;
2113 	}
2114 
2115 	v4l2_dbg(1, debug, v4l2_dev, "%s: signal lock ok, i:%d\n", __func__, i);
2116 	hdmirx_writel(hdmirx_dev, GLOBAL_SWRESET_REQUEST, DATAPATH_SWRESETREQ);
2117 
2118 	reinit_completion(&hdmirx_dev->avi_pkt_rcv);
2119 	hdmirx_clear_interrupt(hdmirx_dev, PKT_2_INT_CLEAR, 0xffffffff);
2120 	hdmirx_update_bits(hdmirx_dev, PKT_2_INT_MASK_N,
2121 			   PKTDEC_AVIIF_RCV_IRQ, PKTDEC_AVIIF_RCV_IRQ);
2122 
2123 	if (!wait_for_completion_timeout(&hdmirx_dev->avi_pkt_rcv,
2124 					 msecs_to_jiffies(300))) {
2125 		v4l2_err(v4l2_dev, "%s wait avi_pkt_rcv failed\n", __func__);
2126 		hdmirx_update_bits(hdmirx_dev, PKT_2_INT_MASK_N,
2127 				   PKTDEC_AVIIF_RCV_IRQ, 0);
2128 	}
2129 
2130 	msleep(50);
2131 	hdmirx_format_change(hdmirx_dev);
2132 
2133 	return 0;
2134 }
2135 
2136 static void hdmirx_plugin(struct snps_hdmirx_dev *hdmirx_dev)
2137 {
2138 	if (hdmirx_dev->plugged)
2139 		return;
2140 
2141 	hdmirx_submodule_init(hdmirx_dev);
2142 	hdmirx_update_bits(hdmirx_dev, SCDC_CONFIG, POWERPROVIDED,
2143 			   POWERPROVIDED);
2144 	hdmirx_phy_config(hdmirx_dev);
2145 	hdmirx_interrupts_setup(hdmirx_dev, true);
2146 
2147 	hdmirx_dev->plugged = true;
2148 }
2149 
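/*
 * Debounced hotplug work, serialized with the resolution-change work by
 * work_lock: the receiver is always torn down first and re-initialized
 * only while 5V power from the source is present.
 */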
2150 static void hdmirx_delayed_work_hotplug(struct work_struct *work)
2151 {
2152 	struct snps_hdmirx_dev *hdmirx_dev;
2153 	bool plugin;
2154 
2155 	hdmirx_dev = container_of(work, struct snps_hdmirx_dev,
2156 				  delayed_work_hotplug.work);
2157 
2158 	mutex_lock(&hdmirx_dev->work_lock);
2159 	plugin = tx_5v_power_present(hdmirx_dev);
2160 	v4l2_ctrl_s_ctrl(hdmirx_dev->detect_tx_5v_ctrl, plugin);
2161 	v4l2_dbg(1, debug, &hdmirx_dev->v4l2_dev, "%s: plugin:%d\n",
2162 		 __func__, plugin);
2163 
2164 	hdmirx_plugout(hdmirx_dev);
2165 
2166 	if (plugin)
2167 		hdmirx_plugin(hdmirx_dev);
2168 
2169 	mutex_unlock(&hdmirx_dev->work_lock);
2170 }
2171 
2172 static void hdmirx_delayed_work_res_change(struct work_struct *work)
2173 {
2174 	struct snps_hdmirx_dev *hdmirx_dev;
2175 	bool plugin;
2176 
2177 	hdmirx_dev = container_of(work, struct snps_hdmirx_dev,
2178 				  delayed_work_res_change.work);
2179 
2180 	mutex_lock(&hdmirx_dev->work_lock);
2181 	plugin = tx_5v_power_present(hdmirx_dev);
2182 	v4l2_dbg(1, debug, &hdmirx_dev->v4l2_dev, "%s: plugin:%d\n",
2183 		 __func__, plugin);
2184 	if (plugin) {
2185 		hdmirx_interrupts_setup(hdmirx_dev, false);
2186 		hdmirx_submodule_init(hdmirx_dev);
2187 		hdmirx_update_bits(hdmirx_dev, SCDC_CONFIG, POWERPROVIDED,
2188 				   POWERPROVIDED);
2189 		hdmirx_phy_config(hdmirx_dev);
2190 
2191 		if (hdmirx_wait_signal_lock(hdmirx_dev)) {
2192 			hdmirx_plugout(hdmirx_dev);
2193 			queue_delayed_work(system_unbound_wq,
2194 					   &hdmirx_dev->delayed_work_hotplug,
2195 					   msecs_to_jiffies(200));
2196 		} else {
2197 			hdmirx_dma_config(hdmirx_dev);
2198 			hdmirx_interrupts_setup(hdmirx_dev, true);
2199 		}
2200 	}
2201 	mutex_unlock(&hdmirx_dev->work_lock);
2202 }
2203 
2204 static irqreturn_t hdmirx_5v_det_irq_handler(int irq, void *dev_id)
2205 {
2206 	struct snps_hdmirx_dev *hdmirx_dev = dev_id;
2207 	u32 val;
2208 
2209 	val = gpiod_get_value(hdmirx_dev->detect_5v_gpio);
2210 	v4l2_dbg(3, debug, &hdmirx_dev->v4l2_dev, "%s: 5v:%d\n", __func__, val);
2211 
2212 	queue_delayed_work(system_unbound_wq,
2213 			   &hdmirx_dev->delayed_work_hotplug,
2214 			   msecs_to_jiffies(10));
2215 
2216 	return IRQ_HANDLED;
2217 }
2218 
2219 static const struct hdmirx_cec_ops hdmirx_cec_ops = {
2220 	.write = hdmirx_writel,
2221 	.read = hdmirx_readl,
2222 };
2223 
2224 static void devm_hdmirx_of_reserved_mem_device_release(void *dev)
2225 {
2226 	of_reserved_mem_device_release(dev);
2227 }
2228 
2229 static int hdmirx_parse_dt(struct snps_hdmirx_dev *hdmirx_dev)
2230 {
2231 	struct device *dev = hdmirx_dev->dev;
2232 	int ret;
2233 
2234 	hdmirx_dev->num_clks = devm_clk_bulk_get_all(dev, &hdmirx_dev->clks);
2235 	if (hdmirx_dev->num_clks < 1)
2236 		return -ENODEV;
2237 
2238 	hdmirx_dev->resets[HDMIRX_RST_A].id = "axi";
2239 	hdmirx_dev->resets[HDMIRX_RST_P].id = "apb";
2240 	hdmirx_dev->resets[HDMIRX_RST_REF].id = "ref";
2241 	hdmirx_dev->resets[HDMIRX_RST_BIU].id = "biu";
2242 
2243 	ret = devm_reset_control_bulk_get_exclusive(dev, HDMIRX_NUM_RST,
2244 						    hdmirx_dev->resets);
2245 	if (ret < 0) {
2246 		dev_err(dev, "failed to get reset controls\n");
2247 		return ret;
2248 	}
2249 
2250 	hdmirx_dev->detect_5v_gpio =
2251 		devm_gpiod_get_optional(dev, "hpd", GPIOD_IN);
2252 
2253 	if (IS_ERR(hdmirx_dev->detect_5v_gpio)) {
2254 		dev_err(dev, "failed to get hdmirx hot plug detection gpio\n");
2255 		return PTR_ERR(hdmirx_dev->detect_5v_gpio);
2256 	}
2257 
2258 	hdmirx_dev->grf = syscon_regmap_lookup_by_phandle(dev->of_node,
2259 							  "rockchip,grf");
2260 	if (IS_ERR(hdmirx_dev->grf)) {
2261 		dev_err(dev, "failed to get rockchip,grf\n");
2262 		return PTR_ERR(hdmirx_dev->grf);
2263 	}
2264 
2265 	hdmirx_dev->vo1_grf = syscon_regmap_lookup_by_phandle(dev->of_node,
2266 							      "rockchip,vo1-grf");
2267 	if (IS_ERR(hdmirx_dev->vo1_grf)) {
2268 		dev_err(dev, "failed to get rockchip,vo1-grf\n");
2269 		return PTR_ERR(hdmirx_dev->vo1_grf);
2270 	}
2271 
2272 	if (!device_property_read_bool(dev, "hpd-is-active-low"))
2273 		hdmirx_dev->hpd_trigger_level_high = true;
2274 
2275 	ret = of_reserved_mem_device_init(dev);
2276 	if (ret) {
2277 		dev_warn(dev, "no reserved memory for HDMIRX, use default CMA\n");
2278 	} else {
2279 		ret = devm_add_action_or_reset(dev,
2280 					       devm_hdmirx_of_reserved_mem_device_release,
2281 					       dev);
2282 		if (ret)
2283 			return ret;
2284 	}
2285 
2286 	return 0;
2287 }
2288 
2289 static void hdmirx_disable_all_interrupts(struct snps_hdmirx_dev *hdmirx_dev)
2290 {
2291 	hdmirx_writel(hdmirx_dev, MAINUNIT_0_INT_MASK_N, 0);
2292 	hdmirx_writel(hdmirx_dev, MAINUNIT_1_INT_MASK_N, 0);
2293 	hdmirx_writel(hdmirx_dev, MAINUNIT_2_INT_MASK_N, 0);
2294 	hdmirx_writel(hdmirx_dev, AVPUNIT_0_INT_MASK_N, 0);
2295 	hdmirx_writel(hdmirx_dev, AVPUNIT_1_INT_MASK_N, 0);
2296 	hdmirx_writel(hdmirx_dev, PKT_0_INT_MASK_N, 0);
2297 	hdmirx_writel(hdmirx_dev, PKT_1_INT_MASK_N, 0);
2298 	hdmirx_writel(hdmirx_dev, PKT_2_INT_MASK_N, 0);
2299 	hdmirx_writel(hdmirx_dev, SCDC_INT_MASK_N, 0);
2300 	hdmirx_writel(hdmirx_dev, CEC_INT_MASK_N, 0);
2301 
2302 	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_0_INT_CLEAR, 0xffffffff);
2303 	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_1_INT_CLEAR, 0xffffffff);
2304 	hdmirx_clear_interrupt(hdmirx_dev, MAINUNIT_2_INT_CLEAR, 0xffffffff);
2305 	hdmirx_clear_interrupt(hdmirx_dev, AVPUNIT_0_INT_CLEAR, 0xffffffff);
2306 	hdmirx_clear_interrupt(hdmirx_dev, AVPUNIT_1_INT_CLEAR, 0xffffffff);
2307 	hdmirx_clear_interrupt(hdmirx_dev, PKT_0_INT_CLEAR, 0xffffffff);
2308 	hdmirx_clear_interrupt(hdmirx_dev, PKT_1_INT_CLEAR, 0xffffffff);
2309 	hdmirx_clear_interrupt(hdmirx_dev, PKT_2_INT_CLEAR, 0xffffffff);
2310 	hdmirx_clear_interrupt(hdmirx_dev, SCDC_INT_CLEAR, 0xffffffff);
2311 	hdmirx_clear_interrupt(hdmirx_dev, HDCP_INT_CLEAR, 0xffffffff);
2312 	hdmirx_clear_interrupt(hdmirx_dev, HDCP_1_INT_CLEAR, 0xffffffff);
2313 	hdmirx_clear_interrupt(hdmirx_dev, CEC_INT_CLEAR, 0xffffffff);
2314 }
2315 
2316 static void hdmirx_init(struct snps_hdmirx_dev *hdmirx_dev)
2317 {
2318 	hdmirx_update_bits(hdmirx_dev, PHY_CONFIG, PHY_RESET | PHY_PDDQ, 0);
2319 
2320 	regmap_write(hdmirx_dev->vo1_grf, VO1_GRF_VO1_CON2,
2321 		     (HDMIRX_SDAIN_MSK | HDMIRX_SCLIN_MSK) |
2322 		     ((HDMIRX_SDAIN_MSK | HDMIRX_SCLIN_MSK) << 16));
2323 	/*
2324 	 * Some interrupts are enabled by default, so we disable all
2325 	 * interrupts and clear the interrupt status first.
2326 	 */
2327 	hdmirx_disable_all_interrupts(hdmirx_dev);
2328 }
2329 
2330 /* hdmi-4k-300mhz EDID produced by v4l2-ctl tool */
2331 static u8 __maybe_unused edid_default[] = {
2332 	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00,
2333 	0x31, 0xd8, 0x34, 0x12, 0x00, 0x00, 0x00, 0x00,
2334 	0x22, 0x1a, 0x01, 0x03, 0x80, 0x60, 0x36, 0x78,
2335 	0x0f, 0xee, 0x91, 0xa3, 0x54, 0x4c, 0x99, 0x26,
2336 	0x0f, 0x50, 0x54, 0x2f, 0xcf, 0x00, 0x31, 0x59,
2337 	0x45, 0x59, 0x81, 0x80, 0x81, 0x40, 0x90, 0x40,
2338 	0x95, 0x00, 0xa9, 0x40, 0xb3, 0x00, 0x04, 0x74,
2339 	0x00, 0x30, 0xf2, 0x70, 0x5a, 0x80, 0xb0, 0x58,
2340 	0x8a, 0x00, 0xc0, 0x1c, 0x32, 0x00, 0x00, 0x1e,
2341 	0x00, 0x00, 0x00, 0xfd, 0x00, 0x18, 0x55, 0x18,
2342 	0x87, 0x1e, 0x00, 0x0a, 0x20, 0x20, 0x20, 0x20,
2343 	0x20, 0x20, 0x00, 0x00, 0x00, 0xfc, 0x00, 0x68,
2344 	0x64, 0x6d, 0x69, 0x2d, 0x34, 0x6b, 0x2d, 0x33,
2345 	0x30, 0x30, 0x0a, 0x20, 0x00, 0x00, 0x00, 0x10,
2346 	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2347 	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0xc5,
2348 
2349 	0x02, 0x03, 0x40, 0xf1, 0x4f, 0x5f, 0x5e, 0x5d,
2350 	0x10, 0x1f, 0x04, 0x13, 0x22, 0x21, 0x20, 0x05,
2351 	0x14, 0x02, 0x11, 0x01, 0x23, 0x09, 0x07, 0x07,
2352 	0x83, 0x01, 0x00, 0x00, 0x6d, 0x03, 0x0c, 0x00,
2353 	0x10, 0x00, 0x00, 0x3c, 0x21, 0x00, 0x60, 0x01,
2354 	0x02, 0x03, 0x67, 0xd8, 0x5d, 0xc4, 0x01, 0x00,
2355 	0x00, 0x00, 0xe2, 0x00, 0xca, 0xe3, 0x05, 0x00,
2356 	0x00, 0xe3, 0x06, 0x01, 0x00, 0xe2, 0x0d, 0x5f,
2357 	0xa3, 0x66, 0x00, 0xa0, 0xf0, 0x70, 0x1f, 0x80,
2358 	0x30, 0x20, 0x35, 0x00, 0xc0, 0x1c, 0x32, 0x00,
2359 	0x00, 0x1e, 0x1a, 0x36, 0x80, 0xa0, 0x70, 0x38,
2360 	0x1f, 0x40, 0x30, 0x20, 0x35, 0x00, 0xc0, 0x1c,
2361 	0x32, 0x00, 0x00, 0x1a, 0x1a, 0x1d, 0x00, 0x80,
2362 	0x51, 0xd0, 0x1c, 0x20, 0x40, 0x80, 0x35, 0x00,
2363 	0xc0, 0x1c, 0x32, 0x00, 0x00, 0x1c, 0x00, 0x00,
2364 	0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xa1,
2365 };
2366 
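/*
 * Pull HPD low and, when the default-EDID Kconfig option is enabled,
 * program the hdmi-4k-300mhz EDID above before signalling HPD again.
 */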
2367 static void hdmirx_load_default_edid(struct snps_hdmirx_dev *hdmirx_dev)
2368 {
2369 	struct v4l2_edid def_edid = {};
2370 
2371 	hdmirx_hpd_ctrl(hdmirx_dev, false);
2372 
2373 	if (!IS_ENABLED(CONFIG_VIDEO_SYNOPSYS_HDMIRX_LOAD_DEFAULT_EDID))
2374 		return;
2375 
2376 	/* write the default EDID while HPD is deasserted */
2377 	def_edid.blocks = sizeof(edid_default) / EDID_BLOCK_SIZE;
2378 	def_edid.edid = edid_default;
2379 
2380 	hdmirx_write_edid(hdmirx_dev, &def_edid);
2381 	hdmirx_hpd_ctrl(hdmirx_dev, true);
2382 }
2383 
2384 static int hdmirx_disable(struct device *dev)
2385 {
2386 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2387 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
2388 
2389 	hdmirx_plugout(hdmirx_dev);
2390 	hdmirx_hpd_ctrl(hdmirx_dev, false);
2391 
2392 	clk_bulk_disable_unprepare(hdmirx_dev->num_clks, hdmirx_dev->clks);
2393 
2394 	v4l2_dbg(2, debug, v4l2_dev, "%s: suspend\n", __func__);
2395 
2396 	return pinctrl_pm_select_sleep_state(dev);
2397 }
2398 
2399 static int hdmirx_enable(struct device *dev)
2400 {
2401 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2402 	struct v4l2_device *v4l2_dev = &hdmirx_dev->v4l2_dev;
2403 	int ret;
2404 
2405 	v4l2_dbg(2, debug, v4l2_dev, "%s: resume\n", __func__);
2406 	ret = pinctrl_pm_select_default_state(dev);
2407 	if (ret < 0)
2408 		return ret;
2409 
2410 	ret = clk_bulk_prepare_enable(hdmirx_dev->num_clks, hdmirx_dev->clks);
2411 	if (ret) {
2412 		dev_err(dev, "failed to enable hdmirx bulk clks: %d\n", ret);
2413 		return ret;
2414 	}
2415 
2416 	reset_control_bulk_assert(HDMIRX_NUM_RST, hdmirx_dev->resets);
2417 	usleep_range(150, 160);
2418 	reset_control_bulk_deassert(HDMIRX_NUM_RST, hdmirx_dev->resets);
2419 	usleep_range(150, 160);
2420 
2421 	return 0;
2422 }
2423 
2424 static void hdmirx_disable_irq(struct device *dev)
2425 {
2426 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2427 
2428 	disable_irq(hdmirx_dev->det_irq);
2429 	disable_irq(hdmirx_dev->dma_irq);
2430 	disable_irq(hdmirx_dev->hdmi_irq);
2431 
2432 	cancel_delayed_work_sync(&hdmirx_dev->delayed_work_hotplug);
2433 	cancel_delayed_work_sync(&hdmirx_dev->delayed_work_res_change);
2434 }
2435 
2436 static void hdmirx_enable_irq(struct device *dev)
2437 {
2438 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2439 
2440 	enable_irq(hdmirx_dev->hdmi_irq);
2441 	enable_irq(hdmirx_dev->dma_irq);
2442 	enable_irq(hdmirx_dev->det_irq);
2443 
2444 	queue_delayed_work(system_unbound_wq,
2445 			   &hdmirx_dev->delayed_work_hotplug,
2446 			   msecs_to_jiffies(110));
2447 }
2448 
2449 static __maybe_unused int hdmirx_suspend(struct device *dev)
2450 {
2451 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2452 
2453 	hdmirx_disable_irq(dev);
2454 
2455 	/* TODO store CEC HW state */
2456 	disable_irq(hdmirx_dev->cec->irq);
2457 
2458 	return hdmirx_disable(dev);
2459 }
2460 
2461 static __maybe_unused int hdmirx_resume(struct device *dev)
2462 {
2463 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2464 	int ret = hdmirx_enable(dev);
2465 
2466 	if (ret)
2467 		return ret;
2468 
2469 	if (hdmirx_dev->edid_blocks_written) {
2470 		hdmirx_write_edid_data(hdmirx_dev, hdmirx_dev->edid,
2471 				       hdmirx_dev->edid_blocks_written);
2472 		hdmirx_hpd_ctrl(hdmirx_dev, true);
2473 	}
2474 
2475 	/* TODO restore CEC HW state */
2476 	enable_irq(hdmirx_dev->cec->irq);
2477 
2478 	hdmirx_enable_irq(dev);
2479 
2480 	return 0;
2481 }
2482 
2483 static const struct dev_pm_ops snps_hdmirx_pm_ops = {
2484 	SET_SYSTEM_SLEEP_PM_OPS(hdmirx_suspend, hdmirx_resume)
2485 };
2486 
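/*
 * Request the "hdmi", "dma" and 5V-detect interrupts with autoenable
 * suppressed (IRQ_NOAUTOEN); they are only enabled later through
 * hdmirx_enable_irq().
 */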
2487 static int hdmirx_setup_irq(struct snps_hdmirx_dev *hdmirx_dev,
2488 			    struct platform_device *pdev)
2489 {
2490 	struct device *dev = hdmirx_dev->dev;
2491 	int ret, irq;
2492 
2493 	irq = platform_get_irq_byname(pdev, "hdmi");
2494 	if (irq < 0) {
2495 		dev_err_probe(dev, irq, "failed to get hdmi irq\n");
2496 		return irq;
2497 	}
2498 
2499 	irq_set_status_flags(irq, IRQ_NOAUTOEN);
2500 
2501 	hdmirx_dev->hdmi_irq = irq;
2502 	ret = devm_request_irq(dev, irq, hdmirx_hdmi_irq_handler, 0,
2503 			       "rk_hdmirx-hdmi", hdmirx_dev);
2504 	if (ret) {
2505 		dev_err_probe(dev, ret, "failed to request hdmi irq\n");
2506 		return ret;
2507 	}
2508 
2509 	irq = platform_get_irq_byname(pdev, "dma");
2510 	if (irq < 0) {
2511 		dev_err_probe(dev, irq, "failed to get dma irq\n");
2512 		return irq;
2513 	}
2514 
2515 	irq_set_status_flags(irq, IRQ_NOAUTOEN);
2516 
2517 	hdmirx_dev->dma_irq = irq;
2518 	ret = devm_request_threaded_irq(dev, irq, NULL, hdmirx_dma_irq_handler,
2519 					IRQF_ONESHOT, "rk_hdmirx-dma",
2520 					hdmirx_dev);
2521 	if (ret) {
2522 		dev_err_probe(dev, ret, "failed to request dma irq\n");
2523 		return ret;
2524 	}
2525 
2526 	irq = gpiod_to_irq(hdmirx_dev->detect_5v_gpio);
2527 	if (irq < 0) {
2528 		dev_err_probe(dev, irq, "failed to get hdmirx-5v irq\n");
2529 		return irq;
2530 	}
2531 
2532 	irq_set_status_flags(irq, IRQ_NOAUTOEN);
2533 
2534 	hdmirx_dev->det_irq = irq;
2535 	ret = devm_request_irq(dev, irq, hdmirx_5v_det_irq_handler,
2536 			       IRQF_TRIGGER_FALLING | IRQF_TRIGGER_RISING,
2537 			       "rk_hdmirx-5v", hdmirx_dev);
2538 	if (ret) {
2539 		dev_err_probe(dev, ret, "failed to request hdmirx-5v irq\n");
2540 		return ret;
2541 	}
2542 
2543 	return 0;
2544 }
2545 
2546 static int hdmirx_register_cec(struct snps_hdmirx_dev *hdmirx_dev,
2547 			       struct platform_device *pdev)
2548 {
2549 	struct device *dev = hdmirx_dev->dev;
2550 	struct hdmirx_cec_data cec_data;
2551 	int irq;
2552 
2553 	irq = platform_get_irq_byname(pdev, "cec");
2554 	if (irq < 0) {
2555 		dev_err_probe(dev, irq, "failed to get cec irq\n");
2556 		return irq;
2557 	}
2558 
2559 	cec_data.hdmirx = hdmirx_dev;
2560 	cec_data.dev = hdmirx_dev->dev;
2561 	cec_data.ops = &hdmirx_cec_ops;
2562 	cec_data.irq = irq;
2563 
2564 	hdmirx_dev->cec = snps_hdmirx_cec_register(&cec_data);
2565 	if (IS_ERR(hdmirx_dev->cec))
2566 		return dev_err_probe(dev, PTR_ERR(hdmirx_dev->cec),
2567 				     "failed to register cec\n");
2568 
2569 	return 0;
2570 }
2571 
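/*
 * Probe: limit DMA to 32-bit addresses, parse clocks/resets/GPIO/syscons
 * from DT, map the registers, bring the controller out of reset, register
 * the V4L2 controls, device, video node and CEC adapter, then load the
 * default EDID and enable the interrupts.
 */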
2572 static int hdmirx_probe(struct platform_device *pdev)
2573 {
2574 	struct snps_hdmirx_dev *hdmirx_dev;
2575 	struct device *dev = &pdev->dev;
2576 	struct v4l2_ctrl_handler *hdl;
2577 	struct hdmirx_stream *stream;
2578 	struct v4l2_device *v4l2_dev;
2579 	int ret;
2580 
2581 	hdmirx_dev = devm_kzalloc(dev, sizeof(*hdmirx_dev), GFP_KERNEL);
2582 	if (!hdmirx_dev)
2583 		return -ENOMEM;
2584 
2585 	/*
2586 	 * The RK3588 HDMIRX SoC integration doesn't use an IOMMU and can
2587 	 * only address the first 32 bits of the physical address space.
2588 	 */
2589 	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
2590 	if (ret)
2591 		return ret;
2592 
2593 	hdmirx_dev->dev = dev;
2594 	dev_set_drvdata(dev, hdmirx_dev);
2595 
2596 	ret = hdmirx_parse_dt(hdmirx_dev);
2597 	if (ret)
2598 		return ret;
2599 
2600 	ret = hdmirx_setup_irq(hdmirx_dev, pdev);
2601 	if (ret)
2602 		return ret;
2603 
2604 	hdmirx_dev->regs = devm_platform_ioremap_resource(pdev, 0);
2605 	if (IS_ERR(hdmirx_dev->regs))
2606 		return dev_err_probe(dev, PTR_ERR(hdmirx_dev->regs),
2607 				     "failed to remap regs resource\n");
2608 
2609 	mutex_init(&hdmirx_dev->stream_lock);
2610 	mutex_init(&hdmirx_dev->work_lock);
2611 	spin_lock_init(&hdmirx_dev->rst_lock);
2612 
2613 	init_completion(&hdmirx_dev->cr_write_done);
2614 	init_completion(&hdmirx_dev->timer_base_lock);
2615 	init_completion(&hdmirx_dev->avi_pkt_rcv);
2616 
2617 	INIT_DELAYED_WORK(&hdmirx_dev->delayed_work_hotplug,
2618 			  hdmirx_delayed_work_hotplug);
2619 	INIT_DELAYED_WORK(&hdmirx_dev->delayed_work_res_change,
2620 			  hdmirx_delayed_work_res_change);
2621 
2622 	hdmirx_dev->cur_fmt_fourcc = V4L2_PIX_FMT_BGR24;
2623 	hdmirx_dev->timings = cea640x480;
2624 
2625 	hdmirx_enable(dev);
2626 	hdmirx_init(hdmirx_dev);
2627 
2628 	v4l2_dev = &hdmirx_dev->v4l2_dev;
2629 	strscpy(v4l2_dev->name, dev_name(dev), sizeof(v4l2_dev->name));
2630 
2631 	hdl = &hdmirx_dev->hdl;
2632 	v4l2_ctrl_handler_init(hdl, 3);
2633 
2634 	hdmirx_dev->detect_tx_5v_ctrl = v4l2_ctrl_new_std(hdl, NULL,
2635 							  V4L2_CID_DV_RX_POWER_PRESENT,
2636 							  0, 1, 0, 0);
2637 
2638 	hdmirx_dev->rgb_range = v4l2_ctrl_new_std_menu(hdl, NULL,
2639 						       V4L2_CID_DV_RX_RGB_RANGE,
2640 						       V4L2_DV_RGB_RANGE_FULL, 0,
2641 						       V4L2_DV_RGB_RANGE_AUTO);
2642 
2643 	hdmirx_dev->rgb_range->flags |= V4L2_CTRL_FLAG_READ_ONLY;
2644 
2645 	hdmirx_dev->content_type =
2646 		v4l2_ctrl_new_std_menu(hdl, NULL, V4L2_CID_DV_RX_IT_CONTENT_TYPE,
2647 				       V4L2_DV_IT_CONTENT_TYPE_NO_ITC, 0,
2648 				       V4L2_DV_IT_CONTENT_TYPE_NO_ITC);
2649 
2650 	if (hdl->error) {
2651 		ret = hdl->error;
2652 		dev_err_probe(dev, ret, "v4l2 ctrl handler init failed\n");
2653 		goto err_pm;
2654 	}
2655 	hdmirx_dev->v4l2_dev.ctrl_handler = hdl;
2656 
2657 	ret = v4l2_device_register(dev, &hdmirx_dev->v4l2_dev);
2658 	if (ret < 0) {
2659 		dev_err_probe(dev, ret, "v4l2 device registration failed\n");
2660 		goto err_hdl;
2661 	}
2662 
2663 	stream = &hdmirx_dev->stream;
2664 	stream->hdmirx_dev = hdmirx_dev;
2665 	ret = hdmirx_register_stream_vdev(stream);
2666 	if (ret < 0) {
2667 		dev_err_probe(dev, ret, "video device registration failed\n");
2668 		goto err_unreg_v4l2_dev;
2669 	}
2670 
2671 	ret = hdmirx_register_cec(hdmirx_dev, pdev);
2672 	if (ret)
2673 		goto err_unreg_video_dev;
2674 
2675 	hdmirx_load_default_edid(hdmirx_dev);
2676 
2677 	hdmirx_enable_irq(dev);
2678 
2679 	hdmirx_dev->debugfs_dir = debugfs_create_dir(hdmirx_dev->v4l2_dev.name,
2680 						     v4l2_debugfs_root());
2681 
2682 	hdmirx_dev->infoframes = v4l2_debugfs_if_alloc(hdmirx_dev->debugfs_dir,
2683 						       V4L2_DEBUGFS_IF_AVI, hdmirx_dev,
2684 						       hdmirx_debugfs_if_read);
2685 
2686 	return 0;
2687 
2688 err_unreg_video_dev:
2689 	vb2_video_unregister_device(&hdmirx_dev->stream.vdev);
2690 err_unreg_v4l2_dev:
2691 	v4l2_device_unregister(&hdmirx_dev->v4l2_dev);
2692 err_hdl:
2693 	v4l2_ctrl_handler_free(&hdmirx_dev->hdl);
2694 err_pm:
2695 	hdmirx_disable(dev);
2696 
2697 	return ret;
2698 }
2699 
2700 static void hdmirx_remove(struct platform_device *pdev)
2701 {
2702 	struct device *dev = &pdev->dev;
2703 	struct snps_hdmirx_dev *hdmirx_dev = dev_get_drvdata(dev);
2704 
2705 	v4l2_debugfs_if_free(hdmirx_dev->infoframes);
2706 	debugfs_remove_recursive(hdmirx_dev->debugfs_dir);
2707 
2708 	snps_hdmirx_cec_unregister(hdmirx_dev->cec);
2709 
2710 	hdmirx_disable_irq(dev);
2711 
2712 	vb2_video_unregister_device(&hdmirx_dev->stream.vdev);
2713 	v4l2_ctrl_handler_free(&hdmirx_dev->hdl);
2714 	v4l2_device_unregister(&hdmirx_dev->v4l2_dev);
2715 
2716 	/* touched by hdmirx_disable()->hdmirx_plugout() */
2717 	hdmirx_dev->rgb_range = NULL;
2718 	hdmirx_dev->content_type = NULL;
2719 
2720 	hdmirx_disable(dev);
2721 
2722 	reset_control_bulk_assert(HDMIRX_NUM_RST, hdmirx_dev->resets);
2723 }
2724 
2725 static const struct of_device_id hdmirx_id[] = {
2726 	{ .compatible = "rockchip,rk3588-hdmirx-ctrler" },
2727 	{ }
2728 };
2729 MODULE_DEVICE_TABLE(of, hdmirx_id);
2730 
2731 static struct platform_driver hdmirx_driver = {
2732 	.probe = hdmirx_probe,
2733 	.remove = hdmirx_remove,
2734 	.driver = {
2735 		.name = "snps_hdmirx",
2736 		.of_match_table = hdmirx_id,
2737 		.pm = &snps_hdmirx_pm_ops,
2738 	}
2739 };
2740 module_platform_driver(hdmirx_driver);
2741 
2742 MODULE_DESCRIPTION("Synopsys HDMI Receiver Driver");
2743 MODULE_AUTHOR("Dingxian Wen <shawn.wen@rock-chips.com>");
2744 MODULE_AUTHOR("Shreeya Patel <shreeya.patel@collabora.com>");
2745 MODULE_AUTHOR("Dmitry Osipenko <dmitry.osipenko@collabora.com>");
2746 MODULE_LICENSE("GPL");
2747