// SPDX-License-Identifier: GPL-2.0
/*
 * camss-vfe-4-7.c
 *
 * Qualcomm MSM Camera Subsystem - VFE (Video Front End) Module v4.7
 *
 * Copyright (c) 2013-2015, The Linux Foundation. All rights reserved.
 * Copyright (C) 2015-2018 Linaro Ltd.
 */

#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>

#include "camss.h"
#include "camss-vfe.h"
#include "camss-vfe-gen1.h"


#define VFE_0_GLOBAL_RESET_CMD		0x018
#define VFE_0_GLOBAL_RESET_CMD_CORE	BIT(0)
#define VFE_0_GLOBAL_RESET_CMD_CAMIF	BIT(1)
#define VFE_0_GLOBAL_RESET_CMD_BUS	BIT(2)
#define VFE_0_GLOBAL_RESET_CMD_BUS_BDG	BIT(3)
#define VFE_0_GLOBAL_RESET_CMD_REGISTER	BIT(4)
#define VFE_0_GLOBAL_RESET_CMD_PM	BIT(5)
#define VFE_0_GLOBAL_RESET_CMD_BUS_MISR	BIT(6)
#define VFE_0_GLOBAL_RESET_CMD_TESTGEN	BIT(7)
#define VFE_0_GLOBAL_RESET_CMD_DSP	BIT(8)
#define VFE_0_GLOBAL_RESET_CMD_IDLE_CGC	BIT(9)

#define VFE_0_MODULE_LENS_EN		0x040
#define VFE_0_MODULE_LENS_EN_DEMUX		BIT(2)
#define VFE_0_MODULE_LENS_EN_CHROMA_UPSAMPLE	BIT(3)

#define VFE_0_MODULE_ZOOM_EN		0x04c
#define VFE_0_MODULE_ZOOM_EN_SCALE_ENC		BIT(1)
#define VFE_0_MODULE_ZOOM_EN_CROP_ENC		BIT(2)
#define VFE_0_MODULE_ZOOM_EN_REALIGN_BUF	BIT(9)

#define VFE_0_CORE_CFG			0x050
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR	0x4
#define VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB	0x5
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY	0x6
#define VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY	0x7
#define VFE_0_CORE_CFG_COMPOSITE_REG_UPDATE_EN	BIT(4)

#define VFE_0_IRQ_CMD			0x058
#define VFE_0_IRQ_CMD_GLOBAL_CLEAR	BIT(0)

#define VFE_0_IRQ_MASK_0		0x05c
#define VFE_0_IRQ_MASK_0_CAMIF_SOF			BIT(0)
#define VFE_0_IRQ_MASK_0_CAMIF_EOF			BIT(1)
#define VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n)		BIT((n) + 5)
#define VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(n)		\
	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_MASK_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
#define VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
#define VFE_0_IRQ_MASK_0_RESET_ACK			BIT(31)
#define VFE_0_IRQ_MASK_1		0x060
#define VFE_0_IRQ_MASK_1_CAMIF_ERROR			BIT(0)
#define VFE_0_IRQ_MASK_1_VIOLATION			BIT(7)
#define VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK		BIT(8)
#define VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(n)	BIT((n) + 9)
#define VFE_0_IRQ_MASK_1_RDIn_SOF(n)			BIT((n) + 29)

#define VFE_0_IRQ_CLEAR_0		0x064
#define VFE_0_IRQ_CLEAR_1		0x068

#define VFE_0_IRQ_STATUS_0		0x06c
#define VFE_0_IRQ_STATUS_0_CAMIF_SOF			BIT(0)
#define VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n)		BIT((n) + 5)
#define VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(n)		\
	((n) == VFE_LINE_PIX ? BIT(4) : VFE_0_IRQ_STATUS_0_RDIn_REG_UPDATE(n))
#define VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(n)	BIT((n) + 8)
#define VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(n)	BIT((n) + 25)
#define VFE_0_IRQ_STATUS_0_RESET_ACK			BIT(31)
#define VFE_0_IRQ_STATUS_1		0x070
#define VFE_0_IRQ_STATUS_1_VIOLATION			BIT(7)
#define VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK		BIT(8)
#define VFE_0_IRQ_STATUS_1_RDIn_SOF(n)			BIT((n) + 29)

#define VFE_0_IRQ_COMPOSITE_MASK_0	0x074
#define VFE_0_VIOLATION_STATUS		0x07c

#define VFE_0_BUS_CMD			0x80
#define VFE_0_BUS_CMD_Mx_RLD_CMD(x)	BIT(x)

#define VFE_0_BUS_CFG			0x084

#define VFE_0_BUS_XBAR_CFG_x(x)		(0x90 + 0x4 * ((x) / 2))
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN			BIT(2)
#define VFE_0_BUS_XBAR_CFG_x_M_REALIGN_BUF_EN			BIT(3)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTRA		(0x1 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER		(0x2 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA	(0x3 << 4)
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT		8
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA		0x0
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0	0xc
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1	0xd
#define VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2	0xe

#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(n)		(0x0a0 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT	0
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(n)	(0x0a4 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(n)	(0x0ac + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(n)		(0x0b4 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_BASED_SHIFT	1
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT	2
#define VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK	(0x1f << 2)
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(n)		(0x0b8 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT	16
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(n)	(0x0bc + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(n)	(0x0c0 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(n)	\
							(0x0c4 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(n)	\
							(0x0c8 + 0x2c * (n))
#define VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF	0xffffffff

#define VFE_0_BUS_PING_PONG_STATUS	0x338

#define VFE_0_BUS_BDG_CMD		0x400
#define VFE_0_BUS_BDG_CMD_HALT_REQ	1

#define VFE_0_BUS_BDG_QOS_CFG_0		0x404
#define VFE_0_BUS_BDG_QOS_CFG_0_CFG	0xaaa9aaa9
#define VFE_0_BUS_BDG_QOS_CFG_1		0x408
#define VFE_0_BUS_BDG_QOS_CFG_2		0x40c
#define VFE_0_BUS_BDG_QOS_CFG_3		0x410
#define VFE_0_BUS_BDG_QOS_CFG_4		0x414
#define VFE_0_BUS_BDG_QOS_CFG_5		0x418
#define VFE_0_BUS_BDG_QOS_CFG_6		0x41c
#define VFE_0_BUS_BDG_QOS_CFG_7		0x420
#define VFE_0_BUS_BDG_QOS_CFG_7_CFG	0x0001aaa9

#define VFE48_0_BUS_BDG_QOS_CFG_0_CFG	0xaaa5aaa5
#define VFE48_0_BUS_BDG_QOS_CFG_3_CFG	0xaa55aaa5
#define VFE48_0_BUS_BDG_QOS_CFG_4_CFG	0xaa55aa55
#define VFE48_0_BUS_BDG_QOS_CFG_7_CFG	0x0005aa55

#define VFE_0_BUS_BDG_DS_CFG_0		0x424
#define VFE_0_BUS_BDG_DS_CFG_0_CFG	0xcccc0011
#define VFE_0_BUS_BDG_DS_CFG_1		0x428
#define VFE_0_BUS_BDG_DS_CFG_2		0x42c
#define VFE_0_BUS_BDG_DS_CFG_3		0x430
#define VFE_0_BUS_BDG_DS_CFG_4		0x434
#define VFE_0_BUS_BDG_DS_CFG_5		0x438
#define VFE_0_BUS_BDG_DS_CFG_6		0x43c
#define VFE_0_BUS_BDG_DS_CFG_7		0x440
#define VFE_0_BUS_BDG_DS_CFG_8		0x444
#define VFE_0_BUS_BDG_DS_CFG_9		0x448
#define VFE_0_BUS_BDG_DS_CFG_10		0x44c
#define VFE_0_BUS_BDG_DS_CFG_11		0x450
#define VFE_0_BUS_BDG_DS_CFG_12		0x454
#define VFE_0_BUS_BDG_DS_CFG_13		0x458
#define VFE_0_BUS_BDG_DS_CFG_14		0x45c
#define VFE_0_BUS_BDG_DS_CFG_15		0x460
#define VFE_0_BUS_BDG_DS_CFG_16		0x464
#define VFE_0_BUS_BDG_DS_CFG_16_CFG	0x40000103

#define VFE48_0_BUS_BDG_DS_CFG_0_CFG	0xcccc1111
#define VFE48_0_BUS_BDG_DS_CFG_16_CFG	0x00000110

#define VFE_0_RDI_CFG_x(x)		(0x46c + (0x4 * (x)))
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT	28
#define VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK	(0xf << 28)
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT	4
#define VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK		(0xf << 4)
#define VFE_0_RDI_CFG_x_RDI_EN_BIT		BIT(2)
#define VFE_0_RDI_CFG_x_MIPI_EN_BITS		0x3

#define VFE_0_CAMIF_CMD				0x478
#define VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY	0
#define VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY	1
#define VFE_0_CAMIF_CMD_NO_CHANGE		3
#define VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS	BIT(2)
#define VFE_0_CAMIF_CFG				0x47c
#define VFE_0_CAMIF_CFG_VFE_OUTPUT_EN		BIT(6)
#define VFE_0_CAMIF_FRAME_CFG			0x484
#define VFE_0_CAMIF_WINDOW_WIDTH_CFG		0x488
#define VFE_0_CAMIF_WINDOW_HEIGHT_CFG		0x48c
#define VFE_0_CAMIF_SUBSAMPLE_CFG		0x490
#define VFE_0_CAMIF_IRQ_FRAMEDROP_PATTERN	0x498
#define VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN	0x49c
#define VFE_0_CAMIF_STATUS			0x4a4
#define VFE_0_CAMIF_STATUS_HALT			BIT(31)

#define VFE_0_REG_UPDATE		0x4ac
#define VFE_0_REG_UPDATE_RDIn(n)		BIT(1 + (n))
#define VFE_0_REG_UPDATE_line_n(n)		\
			((n) == VFE_LINE_PIX ? 1 : VFE_0_REG_UPDATE_RDIn(n))

#define VFE_0_DEMUX_CFG				0x560
#define VFE_0_DEMUX_CFG_PERIOD			0x3
#define VFE_0_DEMUX_GAIN_0			0x564
#define VFE_0_DEMUX_GAIN_0_CH0_EVEN		(0x80 << 0)
#define VFE_0_DEMUX_GAIN_0_CH0_ODD		(0x80 << 16)
#define VFE_0_DEMUX_GAIN_1			0x568
#define VFE_0_DEMUX_GAIN_1_CH1			(0x80 << 0)
#define VFE_0_DEMUX_GAIN_1_CH2			(0x80 << 16)
#define VFE_0_DEMUX_EVEN_CFG			0x574
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV	0x9cac
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU	0xac9c
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY	0xc9ca
#define VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY	0xcac9
#define VFE_0_DEMUX_ODD_CFG			0x578
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV	0x9cac
#define VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU	0xac9c
#define VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY	0xc9ca
#define VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY	0xcac9

#define VFE_0_SCALE_ENC_Y_CFG			0x91c
#define VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE		0x920
#define VFE_0_SCALE_ENC_Y_H_PHASE		0x924
#define VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE		0x934
#define VFE_0_SCALE_ENC_Y_V_PHASE		0x938
#define VFE_0_SCALE_ENC_CBCR_CFG		0x948
#define VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE	0x94c
#define VFE_0_SCALE_ENC_CBCR_H_PHASE		0x950
#define VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE	0x960
#define VFE_0_SCALE_ENC_CBCR_V_PHASE		0x964

#define VFE_0_CROP_ENC_Y_WIDTH			0x974
#define VFE_0_CROP_ENC_Y_HEIGHT			0x978
#define VFE_0_CROP_ENC_CBCR_WIDTH		0x97c
#define VFE_0_CROP_ENC_CBCR_HEIGHT		0x980

#define VFE_0_CLAMP_ENC_MAX_CFG			0x984
#define VFE_0_CLAMP_ENC_MAX_CFG_CH0		(0xff << 0)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH1		(0xff << 8)
#define VFE_0_CLAMP_ENC_MAX_CFG_CH2		(0xff << 16)
#define VFE_0_CLAMP_ENC_MIN_CFG			0x988
#define VFE_0_CLAMP_ENC_MIN_CFG_CH0		(0x0 << 0)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH1		(0x0 << 8)
#define VFE_0_CLAMP_ENC_MIN_CFG_CH2		(0x0 << 16)

#define VFE_0_REALIGN_BUF_CFG			0xaac
#define VFE_0_REALIGN_BUF_CFG_CB_ODD_PIXEL	BIT(2)
#define VFE_0_REALIGN_BUF_CFG_CR_ODD_PIXEL	BIT(3)
#define VFE_0_REALIGN_BUF_CFG_HSUB_ENABLE	BIT(4)

#define VFE48_0_BUS_IMAGE_MASTER_CMD		0xcec
#define VFE48_0_BUS_IMAGE_MASTER_n_SHIFT(x)	(2 * (x))

#define CAMIF_TIMEOUT_SLEEP_US 1000
#define CAMIF_TIMEOUT_ALL_US 1000000

#define MSM_VFE_VFE0_UB_SIZE 2047
#define MSM_VFE_VFE0_UB_SIZE_RDI (MSM_VFE_VFE0_UB_SIZE / 3)
#define MSM_VFE_VFE1_UB_SIZE 1535
#define MSM_VFE_VFE1_UB_SIZE_RDI (MSM_VFE_VFE1_UB_SIZE / 3)

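/*
 * Per-RDI UB (buffer) allocation for a given VFE: each RDI output gets an
 * equal third of that VFE's total UB size.
 */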
static u16 vfe_get_ub_size(u8 vfe_id)
{
	if (vfe_id == 0)
		return MSM_VFE_VFE0_UB_SIZE_RDI;
	else if (vfe_id == 1)
		return MSM_VFE_VFE1_UB_SIZE_RDI;

	return 0;
}

static inline void vfe_reg_clr(struct vfe_device *vfe, u32 reg, u32 clr_bits)
{
	u32 bits = readl_relaxed(vfe->base + reg);

	writel_relaxed(bits & ~clr_bits, vfe->base + reg);
}

static inline void vfe_reg_set(struct vfe_device *vfe, u32 reg, u32 set_bits)
{
	u32 bits = readl_relaxed(vfe->base + reg);

	writel_relaxed(bits | set_bits, vfe->base + reg);
}

static void vfe_global_reset(struct vfe_device *vfe)
{
	u32 reset_bits = VFE_0_GLOBAL_RESET_CMD_IDLE_CGC	|
			 VFE_0_GLOBAL_RESET_CMD_DSP		|
			 VFE_0_GLOBAL_RESET_CMD_TESTGEN		|
			 VFE_0_GLOBAL_RESET_CMD_BUS_MISR	|
			 VFE_0_GLOBAL_RESET_CMD_PM		|
			 VFE_0_GLOBAL_RESET_CMD_REGISTER	|
			 VFE_0_GLOBAL_RESET_CMD_BUS_BDG		|
			 VFE_0_GLOBAL_RESET_CMD_BUS		|
			 VFE_0_GLOBAL_RESET_CMD_CAMIF		|
			 VFE_0_GLOBAL_RESET_CMD_CORE;

	writel_relaxed(BIT(31), vfe->base + VFE_0_IRQ_MASK_0);

	/* Enforce barrier between IRQ mask setup and global reset */
	wmb();
	writel_relaxed(reset_bits, vfe->base + VFE_0_GLOBAL_RESET_CMD);
}

static void vfe_halt_request(struct vfe_device *vfe)
{
	writel_relaxed(VFE_0_BUS_BDG_CMD_HALT_REQ,
		       vfe->base + VFE_0_BUS_BDG_CMD);
}

static void vfe_halt_clear(struct vfe_device *vfe)
{
	writel_relaxed(0x0, vfe->base + VFE_0_BUS_BDG_CMD);
}

static void vfe_wm_enable(struct vfe_device *vfe, u8 wm, u8 enable)
{
	if (enable)
		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
	else
		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_CFG(wm),
			    1 << VFE_0_BUS_IMAGE_MASTER_n_WR_CFG_WR_PATH_SHIFT);
}

static void vfe_wm_frame_based(struct vfe_device *vfe, u8 wm, u8 enable)
{
	if (enable)
		vfe_reg_set(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm),
			1 << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_BASED_SHIFT);
	else
		vfe_reg_clr(vfe, VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm),
			1 << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_BASED_SHIFT);
}

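/* Round (width) * (M) up to a whole number of (N)-sized words. */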
#define CALC_WORD(width, M, N) (((width) * (M) + (N) - 1) / (N))

static int vfe_word_per_line_by_pixel(u32 format, u32 pixel_per_line)
{
	int val = 0;

	switch (format) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		val = CALC_WORD(pixel_per_line, 1, 8);
		break;
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
	case V4L2_PIX_FMT_UYVY:
	case V4L2_PIX_FMT_VYUY:
		val = CALC_WORD(pixel_per_line, 2, 8);
		break;
	}

	return val;
}

static int vfe_word_per_line_by_bytes(u32 bytes_per_line)
{
	return CALC_WORD(bytes_per_line, 1, 8);
}

static void vfe_get_wm_sizes(struct v4l2_pix_format_mplane *pix, u8 plane,
			     u16 *width, u16 *height, u16 *bytesperline)
{
	*width = pix->width;
	*height = pix->height;

	switch (pix->pixelformat) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
		*bytesperline = pix->plane_fmt[0].bytesperline;
		if (plane == 1)
			*height /= 2;
		break;
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		*bytesperline = pix->plane_fmt[0].bytesperline;
		break;
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
	case V4L2_PIX_FMT_VYUY:
	case V4L2_PIX_FMT_UYVY:
		*bytesperline = pix->plane_fmt[plane].bytesperline;
		break;
	}
}

static void vfe_wm_line_based(struct vfe_device *vfe, u32 wm,
			      struct v4l2_pix_format_mplane *pix,
			      u8 plane, u32 enable)
{
	u32 reg;

	if (enable) {
		u16 width = 0, height = 0, bytesperline = 0, wpl;

		vfe_get_wm_sizes(pix, plane, &width, &height, &bytesperline);

		wpl = vfe_word_per_line_by_pixel(pix->pixelformat, width);

		reg = height - 1;
		reg |= ((wpl + 3) / 4 - 1) << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));

		wpl = vfe_word_per_line_by_bytes(bytesperline);

		reg = 0x3;
		reg |= (height - 1) << 2;
		reg |= ((wpl + 1) / 2) << 16;

		writel_relaxed(reg, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	} else {
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_IMAGE_SIZE(wm));
		writel_relaxed(0, vfe->base +
			       VFE_0_BUS_IMAGE_MASTER_n_WR_BUFFER_CFG(wm));
	}
}

static void vfe_wm_set_framedrop_period(struct vfe_device *vfe, u8 wm, u8 per)
{
	u32 reg;

	reg = readl_relaxed(vfe->base +
			    VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));

	reg &= ~(VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK);

	reg |= (per << VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_SHIFT)
		& VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG_FRM_DROP_PER_MASK;

	writel_relaxed(reg,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_ADDR_CFG(wm));
}

static void vfe_wm_set_framedrop_pattern(struct vfe_device *vfe, u8 wm,
					 u32 pattern)
{
	writel_relaxed(pattern,
	       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_FRAMEDROP_PATTERN(wm));
}

static void vfe_wm_set_ub_cfg(struct vfe_device *vfe, u8 wm,
			      u16 offset, u16 depth)
{
	u32 reg;

	reg = (offset << VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG_OFFSET_SHIFT) |
		depth;
	writel_relaxed(reg, vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_UB_CFG(wm));
}

static void vfe_bus_reload_wm(struct vfe_device *vfe, u8 wm)
{
	/* Order any outstanding register writes before the reload command */
	wmb();

	writel_relaxed(VFE_0_BUS_CMD_Mx_RLD_CMD(wm), vfe->base + VFE_0_BUS_CMD);

	/* Use barrier to make sure bus reload is issued before anything else */
	wmb();
}

static void vfe_wm_set_ping_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
	writel_relaxed(addr,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PING_ADDR(wm));
}

static void vfe_wm_set_pong_addr(struct vfe_device *vfe, u8 wm, u32 addr)
{
	writel_relaxed(addr,
		       vfe->base + VFE_0_BUS_IMAGE_MASTER_n_WR_PONG_ADDR(wm));
}

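/*
 * Bit <wm> of the ping-pong status register reports the current ping/pong
 * buffer selection of write master <wm>.
 */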
static int vfe_wm_get_ping_pong_status(struct vfe_device *vfe, u8 wm)
{
	u32 reg;

	reg = readl_relaxed(vfe->base + VFE_0_BUS_PING_PONG_STATUS);

	return (reg >> wm) & 0x1;
}

static void vfe_bus_enable_wr_if(struct vfe_device *vfe, u8 enable)
{
	if (enable)
		writel_relaxed(0x101, vfe->base + VFE_0_BUS_CFG);
	else
		writel_relaxed(0, vfe->base + VFE_0_BUS_CFG);
}

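/*
 * Route RDI <id> to write master <wm>: enable the RDI, select its stream
 * and program the bus XBAR single stream select for the write master.
 * Odd write masters use the upper half of the shared XBAR_CFG register.
 */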
static void vfe_bus_connect_wm_to_rdi(struct vfe_device *vfe, u8 wm,
				      enum vfe_line_id id)
{
	u32 reg;

	reg = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), reg);

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	reg |= ((3 * id) << VFE_0_RDI_CFG_x_RDI_STREAM_SEL_SHIFT) &
		VFE_0_RDI_CFG_x_RDI_STREAM_SEL_MASK;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id), reg);

	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_set(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}

static void vfe_wm_set_subsample(struct vfe_device *vfe, u8 wm)
{
	writel_relaxed(VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN_DEF,
	       vfe->base +
	       VFE_0_BUS_IMAGE_MASTER_n_WR_IRQ_SUBSAMPLE_PATTERN(wm));
}

static void vfe_bus_disconnect_wm_from_rdi(struct vfe_device *vfe, u8 wm,
					   enum vfe_line_id id)
{
	u32 reg;

	reg = VFE_0_RDI_CFG_x_RDI_EN_BIT;
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id), reg);

	switch (id) {
	case VFE_LINE_RDI0:
	default:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI0 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI1:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI1 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	case VFE_LINE_RDI2:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_VAL_RDI2 <<
		      VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;
		break;
	}

	if (wm % 2 == 1)
		reg <<= 16;

	vfe_reg_clr(vfe, VFE_0_BUS_XBAR_CFG_x(wm), reg);
}

static void vfe_set_xbar_cfg(struct vfe_device *vfe, struct vfe_output *output,
			     u8 enable)
{
	struct vfe_line *line = container_of(output, struct vfe_line, output);
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;

	switch (p) {
	case V4L2_PIX_FMT_NV12:
	case V4L2_PIX_FMT_NV21:
	case V4L2_PIX_FMT_NV16:
	case V4L2_PIX_FMT_NV61:
		reg = VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_LUMA <<
			VFE_0_BUS_XBAR_CFG_x_M_SINGLE_STREAM_SEL_SHIFT;

		if (output->wm_idx[0] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);

		reg = VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;
		if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV16)
			reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;

		if (output->wm_idx[1] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[1]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[1]),
				    reg);
		break;
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_YVYU:
	case V4L2_PIX_FMT_VYUY:
	case V4L2_PIX_FMT_UYVY:
		reg = VFE_0_BUS_XBAR_CFG_x_M_REALIGN_BUF_EN;
		reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_EN;

		if (p == V4L2_PIX_FMT_YUYV || p == V4L2_PIX_FMT_YVYU)
			reg |= VFE_0_BUS_XBAR_CFG_x_M_PAIR_STREAM_SWAP_INTER_INTRA;

		if (output->wm_idx[0] % 2 == 1)
			reg <<= 16;

		if (enable)
			vfe_reg_set(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);
		else
			vfe_reg_clr(vfe,
				    VFE_0_BUS_XBAR_CFG_x(output->wm_idx[0]),
				    reg);
		break;
	default:
		break;
	}
}

static void vfe_set_realign_cfg(struct vfe_device *vfe, struct vfe_line *line,
				u8 enable)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 val = VFE_0_MODULE_ZOOM_EN_REALIGN_BUF;

	if (p != V4L2_PIX_FMT_YUYV && p != V4L2_PIX_FMT_YVYU &&
			p != V4L2_PIX_FMT_VYUY && p != V4L2_PIX_FMT_UYVY)
		return;

	if (enable) {
		vfe_reg_set(vfe, VFE_0_MODULE_ZOOM_EN, val);
	} else {
		vfe_reg_clr(vfe, VFE_0_MODULE_ZOOM_EN, val);
		return;
	}

	val = VFE_0_REALIGN_BUF_CFG_HSUB_ENABLE;

	if (p == V4L2_PIX_FMT_UYVY || p == V4L2_PIX_FMT_YUYV)
		val |= VFE_0_REALIGN_BUF_CFG_CR_ODD_PIXEL;
	else
		val |= VFE_0_REALIGN_BUF_CFG_CB_ODD_PIXEL;

	writel_relaxed(val, vfe->base + VFE_0_REALIGN_BUF_CFG);
}

static void vfe_set_rdi_cid(struct vfe_device *vfe, enum vfe_line_id id, u8 cid)
{
	vfe_reg_clr(vfe, VFE_0_RDI_CFG_x(id),
		    VFE_0_RDI_CFG_x_RDI_M0_SEL_MASK);

	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(id),
		    cid << VFE_0_RDI_CFG_x_RDI_M0_SEL_SHIFT);
}

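/*
 * Accumulate the reg_update bit for this line in the cached mask and write
 * the whole mask to hardware; vfe_reg_update_clear() drops the bit from the
 * cached mask again.
 */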
static void vfe_reg_update(struct vfe_device *vfe, enum vfe_line_id line_id)
{
	vfe->reg_update |= VFE_0_REG_UPDATE_line_n(line_id);

	/* Enforce barrier between line update and commit */
	wmb();
	writel_relaxed(vfe->reg_update, vfe->base + VFE_0_REG_UPDATE);

	/* Make sure register update is issued before further reg writes */
	wmb();
}

static inline void vfe_reg_update_clear(struct vfe_device *vfe,
					enum vfe_line_id line_id)
{
	vfe->reg_update &= ~VFE_0_REG_UPDATE_line_n(line_id);
}

static void vfe_enable_irq_wm_line(struct vfe_device *vfe, u8 wm,
				   enum vfe_line_id line_id, u8 enable)
{
	u32 irq_en0 = VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(wm) |
		      VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	u32 irq_en1 = VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(wm) |
		      VFE_0_IRQ_MASK_1_RDIn_SOF(line_id);

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
	}
}

static void vfe_enable_irq_pix_line(struct vfe_device *vfe, u8 comp,
				    enum vfe_line_id line_id, u8 enable)
{
	struct vfe_output *output = &vfe->line[line_id].output;
	unsigned int i;
	u32 irq_en0;
	u32 irq_en1;
	u32 comp_mask = 0;

	irq_en0 = VFE_0_IRQ_MASK_0_CAMIF_SOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_CAMIF_EOF;
	irq_en0 |= VFE_0_IRQ_MASK_0_IMAGE_COMPOSITE_DONE_n(comp);
	irq_en0 |= VFE_0_IRQ_MASK_0_line_n_REG_UPDATE(line_id);
	irq_en1 = VFE_0_IRQ_MASK_1_CAMIF_ERROR;
	for (i = 0; i < output->wm_num; i++) {
		irq_en1 |= VFE_0_IRQ_MASK_1_IMAGE_MASTER_n_BUS_OVERFLOW(
							output->wm_idx[i]);
		comp_mask |= (1 << output->wm_idx[i]) << comp * 8;
	}

	if (enable) {
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_set(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	} else {
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_0, irq_en0);
		vfe_reg_clr(vfe, VFE_0_IRQ_MASK_1, irq_en1);
		vfe_reg_clr(vfe, VFE_0_IRQ_COMPOSITE_MASK_0, comp_mask);
	}
}

static void vfe_enable_irq_common(struct vfe_device *vfe)
{
	u32 irq_en0 = VFE_0_IRQ_MASK_0_RESET_ACK;
	u32 irq_en1 = VFE_0_IRQ_MASK_1_VIOLATION |
		      VFE_0_IRQ_MASK_1_BUS_BDG_HALT_ACK;

	vfe_reg_set(vfe, VFE_0_IRQ_MASK_0, irq_en0);
	vfe_reg_set(vfe, VFE_0_IRQ_MASK_1, irq_en1);
}

static void vfe_set_demux_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val, even_cfg, odd_cfg;

	writel_relaxed(VFE_0_DEMUX_CFG_PERIOD, vfe->base + VFE_0_DEMUX_CFG);

	val = VFE_0_DEMUX_GAIN_0_CH0_EVEN | VFE_0_DEMUX_GAIN_0_CH0_ODD;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_0);

	val = VFE_0_DEMUX_GAIN_1_CH1 | VFE_0_DEMUX_GAIN_1_CH2;
	writel_relaxed(val, vfe->base + VFE_0_DEMUX_GAIN_1);

	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_1X16:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YUYV;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YUYV;
		break;
	case MEDIA_BUS_FMT_YVYU8_1X16:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_YVYU;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_YVYU;
		break;
	case MEDIA_BUS_FMT_UYVY8_1X16:
	default:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_UYVY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_UYVY;
		break;
	case MEDIA_BUS_FMT_VYUY8_1X16:
		even_cfg = VFE_0_DEMUX_EVEN_CFG_PATTERN_VYUY;
		odd_cfg = VFE_0_DEMUX_ODD_CFG_PATTERN_VYUY;
		break;
	}

	writel_relaxed(even_cfg, vfe->base + VFE_0_DEMUX_EVEN_CFG);
	writel_relaxed(odd_cfg, vfe->base + VFE_0_DEMUX_ODD_CFG);
}

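/*
 * Program the encoder scaler: each image size register packs
 * ((output - 1) << 16) | (input - 1), and each phase register packs the
 * interpolation resolution together with a phase step that has
 * (14 + interp_reso) fractional bits.
 */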
static void vfe_set_scale_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 input, output;
	u8 interp_reso;
	u32 phase_mult;

	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_Y_CFG);

	input = line->fmt[MSM_VFE_PAD_SINK].width - 1;
	output = line->compose.width - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_H_PHASE);

	input = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	output = line->compose.height - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_Y_V_PHASE);

	writel_relaxed(0x3, vfe->base + VFE_0_SCALE_ENC_CBCR_CFG);

	input = line->fmt[MSM_VFE_PAD_SINK].width - 1;
	output = line->compose.width / 2 - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_H_PHASE);

	input = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	output = line->compose.height - 1;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21)
		output = line->compose.height / 2 - 1;
	reg = (output << 16) | input;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_IMAGE_SIZE);

	interp_reso = vfe_calc_interp_reso(input, output);
	phase_mult = input * (1 << (14 + interp_reso)) / output;
	reg = (interp_reso << 28) | phase_mult;
	writel_relaxed(reg, vfe->base + VFE_0_SCALE_ENC_CBCR_V_PHASE);
}

static void vfe_set_crop_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 p = line->video_out.active_fmt.fmt.pix_mp.pixelformat;
	u32 reg;
	u16 first, last;

	first = line->crop.left;
	last = line->crop.left + line->crop.width - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_Y_HEIGHT);

	first = line->crop.left / 2;
	last = line->crop.left / 2 + line->crop.width / 2 - 1;
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_WIDTH);

	first = line->crop.top;
	last = line->crop.top + line->crop.height - 1;
	if (p == V4L2_PIX_FMT_NV12 || p == V4L2_PIX_FMT_NV21) {
		first = line->crop.top / 2;
		last = line->crop.top / 2 + line->crop.height / 2 - 1;
	}
	reg = (first << 16) | last;
	writel_relaxed(reg, vfe->base + VFE_0_CROP_ENC_CBCR_HEIGHT);
}

static void vfe_set_clamp_cfg(struct vfe_device *vfe)
{
	u32 val = VFE_0_CLAMP_ENC_MAX_CFG_CH0 |
		VFE_0_CLAMP_ENC_MAX_CFG_CH1 |
		VFE_0_CLAMP_ENC_MAX_CFG_CH2;

	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MAX_CFG);

	val = VFE_0_CLAMP_ENC_MIN_CFG_CH0 |
		VFE_0_CLAMP_ENC_MIN_CFG_CH1 |
		VFE_0_CLAMP_ENC_MIN_CFG_CH2;

	writel_relaxed(val, vfe->base + VFE_0_CLAMP_ENC_MIN_CFG);
}

static void vfe_set_qos(struct vfe_device *vfe)
{
	u32 val = VFE_0_BUS_BDG_QOS_CFG_0_CFG;
	u32 val7 = VFE_0_BUS_BDG_QOS_CFG_7_CFG;

	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_0);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_1);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_2);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_3);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_4);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_5);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_QOS_CFG_6);
	writel_relaxed(val7, vfe->base + VFE_0_BUS_BDG_QOS_CFG_7);
}

static void vfe_set_ds(struct vfe_device *vfe)
{
	u32 val = VFE_0_BUS_BDG_DS_CFG_0_CFG;
	u32 val16 = VFE_0_BUS_BDG_DS_CFG_16_CFG;

	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_0);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_1);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_2);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_3);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_4);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_5);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_6);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_7);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_8);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_9);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_10);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_11);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_12);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_13);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_14);
	writel_relaxed(val, vfe->base + VFE_0_BUS_BDG_DS_CFG_15);
	writel_relaxed(val16, vfe->base + VFE_0_BUS_BDG_DS_CFG_16);
}

static void vfe_set_cgc_override(struct vfe_device *vfe, u8 wm, u8 enable)
{
	/* empty */
}

static void vfe_set_camif_cfg(struct vfe_device *vfe, struct vfe_line *line)
{
	u32 val;

	switch (line->fmt[MSM_VFE_PAD_SINK].code) {
	case MEDIA_BUS_FMT_YUYV8_1X16:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCBYCR;
		break;
	case MEDIA_BUS_FMT_YVYU8_1X16:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_YCRYCB;
		break;
	case MEDIA_BUS_FMT_UYVY8_1X16:
	default:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CBYCRY;
		break;
	case MEDIA_BUS_FMT_VYUY8_1X16:
		val = VFE_0_CORE_CFG_PIXEL_PATTERN_CRYCBY;
		break;
	}

	val |= VFE_0_CORE_CFG_COMPOSITE_REG_UPDATE_EN;
	writel_relaxed(val, vfe->base + VFE_0_CORE_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
	val |= (line->fmt[MSM_VFE_PAD_SINK].height - 1) << 16;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_FRAME_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].width * 2 - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_WIDTH_CFG);

	val = line->fmt[MSM_VFE_PAD_SINK].height - 1;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_WINDOW_HEIGHT_CFG);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_SUBSAMPLE_CFG);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_FRAMEDROP_PATTERN);

	val = 0xffffffff;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_IRQ_SUBSAMPLE_PATTERN);

	val = VFE_0_RDI_CFG_x_MIPI_EN_BITS;
	vfe_reg_set(vfe, VFE_0_RDI_CFG_x(0), val);

	val = VFE_0_CAMIF_CFG_VFE_OUTPUT_EN;
	writel_relaxed(val, vfe->base + VFE_0_CAMIF_CFG);
}

static void vfe_set_camif_cmd(struct vfe_device *vfe, u8 enable)
{
	u32 cmd;

	cmd = VFE_0_CAMIF_CMD_CLEAR_CAMIF_STATUS | VFE_0_CAMIF_CMD_NO_CHANGE;
	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);

	/* Make sure the CAMIF command is issued before it is changed again */
	wmb();

	if (enable)
		cmd = VFE_0_CAMIF_CMD_ENABLE_FRAME_BOUNDARY;
	else
		cmd = VFE_0_CAMIF_CMD_DISABLE_FRAME_BOUNDARY;

	writel_relaxed(cmd, vfe->base + VFE_0_CAMIF_CMD);
}

static void vfe_set_module_cfg(struct vfe_device *vfe, u8 enable)
{
	u32 val_lens = VFE_0_MODULE_LENS_EN_DEMUX |
		       VFE_0_MODULE_LENS_EN_CHROMA_UPSAMPLE;
	u32 val_zoom = VFE_0_MODULE_ZOOM_EN_SCALE_ENC |
		       VFE_0_MODULE_ZOOM_EN_CROP_ENC;

	if (enable) {
		vfe_reg_set(vfe, VFE_0_MODULE_LENS_EN, val_lens);
		vfe_reg_set(vfe, VFE_0_MODULE_ZOOM_EN, val_zoom);
	} else {
		vfe_reg_clr(vfe, VFE_0_MODULE_LENS_EN, val_lens);
		vfe_reg_clr(vfe, VFE_0_MODULE_ZOOM_EN, val_zoom);
	}
}

static int vfe_camif_wait_for_stop(struct vfe_device *vfe, struct device *dev)
{
	u32 val;
	int ret;

	ret = readl_poll_timeout(vfe->base + VFE_0_CAMIF_STATUS,
				 val,
				 (val & VFE_0_CAMIF_STATUS_HALT),
				 CAMIF_TIMEOUT_SLEEP_US,
				 CAMIF_TIMEOUT_ALL_US);
	if (ret < 0)
		dev_err(dev, "%s: camif stop timeout\n", __func__);

	return ret;
}

/*
 * vfe_isr - VFE module interrupt handler
 * @irq: Interrupt line
 * @dev: VFE device
 *
 * Return IRQ_HANDLED on success
 */
static irqreturn_t vfe_isr(int irq, void *dev)
{
	struct vfe_device *vfe = dev;
	u32 value0, value1;
	int i, j;

	vfe->res->hw_ops->isr_read(vfe, &value0, &value1);

	dev_dbg(vfe->camss->dev, "VFE: status0 = 0x%08x, status1 = 0x%08x\n",
		value0, value1);

	if (value0 & VFE_0_IRQ_STATUS_0_RESET_ACK)
		vfe->isr_ops.reset_ack(vfe);

	if (value1 & VFE_0_IRQ_STATUS_1_VIOLATION)
		vfe->res->hw_ops->violation_read(vfe);

	if (value1 & VFE_0_IRQ_STATUS_1_BUS_BDG_HALT_ACK)
		vfe->isr_ops.halt_ack(vfe);

	for (i = VFE_LINE_RDI0; i < vfe->res->line_num; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_line_n_REG_UPDATE(i))
			vfe->isr_ops.reg_update(vfe, i);

	if (value0 & VFE_0_IRQ_STATUS_0_CAMIF_SOF)
		vfe->isr_ops.sof(vfe, VFE_LINE_PIX);

	for (i = VFE_LINE_RDI0; i <= VFE_LINE_RDI2; i++)
		if (value1 & VFE_0_IRQ_STATUS_1_RDIn_SOF(i))
			vfe->isr_ops.sof(vfe, i);

	for (i = 0; i < MSM_VFE_COMPOSITE_IRQ_NUM; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_COMPOSITE_DONE_n(i)) {
			vfe->isr_ops.comp_done(vfe, i);
			for (j = 0; j < ARRAY_SIZE(vfe->wm_output_map); j++)
				if (vfe->wm_output_map[j] == VFE_LINE_PIX)
					value0 &= ~VFE_0_IRQ_MASK_0_IMAGE_MASTER_n_PING_PONG(j);
		}

	for (i = 0; i < MSM_VFE_IMAGE_MASTERS_NUM; i++)
		if (value0 & VFE_0_IRQ_STATUS_0_IMAGE_MASTER_n_PING_PONG(i))
			vfe->isr_ops.wm_done(vfe, i);

	return IRQ_HANDLED;
}

static void vfe_isr_read(struct vfe_device *vfe, u32 *value0, u32 *value1)
{
	*value0 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_0);
	*value1 = readl_relaxed(vfe->base + VFE_0_IRQ_STATUS_1);

	writel_relaxed(*value0, vfe->base + VFE_0_IRQ_CLEAR_0);
	writel_relaxed(*value1, vfe->base + VFE_0_IRQ_CLEAR_1);

	/* Enforce barrier between local & global IRQ clear */
	wmb();
	writel_relaxed(VFE_0_IRQ_CMD_GLOBAL_CLEAR, vfe->base + VFE_0_IRQ_CMD);
}

static void vfe_violation_read(struct vfe_device *vfe)
{
	u32 violation = readl_relaxed(vfe->base + VFE_0_VIOLATION_STATUS);

	pr_err_ratelimited("VFE: violation = 0x%08x\n", violation);
}

static const struct vfe_hw_ops_gen1 vfe_ops_gen1_4_7 = {
	.bus_connect_wm_to_rdi = vfe_bus_connect_wm_to_rdi,
	.bus_disconnect_wm_from_rdi = vfe_bus_disconnect_wm_from_rdi,
	.bus_enable_wr_if = vfe_bus_enable_wr_if,
	.bus_reload_wm = vfe_bus_reload_wm,
	.camif_wait_for_stop = vfe_camif_wait_for_stop,
	.enable_irq_common = vfe_enable_irq_common,
	.enable_irq_pix_line = vfe_enable_irq_pix_line,
	.enable_irq_wm_line = vfe_enable_irq_wm_line,
	.get_ub_size = vfe_get_ub_size,
	.halt_clear = vfe_halt_clear,
	.halt_request = vfe_halt_request,
	.set_camif_cfg = vfe_set_camif_cfg,
	.set_camif_cmd = vfe_set_camif_cmd,
	.set_cgc_override = vfe_set_cgc_override,
	.set_clamp_cfg = vfe_set_clamp_cfg,
	.set_crop_cfg = vfe_set_crop_cfg,
	.set_demux_cfg = vfe_set_demux_cfg,
	.set_ds = vfe_set_ds,
	.set_module_cfg = vfe_set_module_cfg,
	.set_qos = vfe_set_qos,
	.set_rdi_cid = vfe_set_rdi_cid,
	.set_realign_cfg = vfe_set_realign_cfg,
	.set_scale_cfg = vfe_set_scale_cfg,
	.set_xbar_cfg = vfe_set_xbar_cfg,
	.wm_enable = vfe_wm_enable,
	.wm_frame_based = vfe_wm_frame_based,
	.wm_get_ping_pong_status = vfe_wm_get_ping_pong_status,
	.wm_line_based = vfe_wm_line_based,
	.wm_set_framedrop_pattern = vfe_wm_set_framedrop_pattern,
	.wm_set_framedrop_period = vfe_wm_set_framedrop_period,
	.wm_set_ping_addr = vfe_wm_set_ping_addr,
	.wm_set_pong_addr = vfe_wm_set_pong_addr,
	.wm_set_subsample = vfe_wm_set_subsample,
	.wm_set_ub_cfg = vfe_wm_set_ub_cfg,
};

static void vfe_subdev_init(struct device *dev, struct vfe_device *vfe)
{
	vfe->isr_ops = vfe_isr_ops_gen1;
	vfe->ops_gen1 = &vfe_ops_gen1_4_7;
	vfe->video_ops = vfe_video_ops_gen1;
}

const struct vfe_hw_ops vfe_ops_4_7 = {
	.global_reset = vfe_global_reset,
	.hw_version = vfe_hw_version,
	.isr_read = vfe_isr_read,
	.isr = vfe_isr,
	.pm_domain_off = vfe_pm_domain_off,
	.pm_domain_on = vfe_pm_domain_on,
	.reg_update_clear = vfe_reg_update_clear,
	.reg_update = vfe_reg_update,
	.subdev_init = vfe_subdev_init,
	.vfe_disable = vfe_gen1_disable,
	.vfe_enable = vfe_gen1_enable,
	.vfe_halt = vfe_gen1_halt,
	.violation_read = vfe_violation_read,
};