xref: /linux/drivers/media/platform/mediatek/jpeg/mtk_jpeg_enc_hw.c (revision ab93e0dd72c37d378dd936f031ffb83ff2bd87ce)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2019 MediaTek Inc.
 * Author: Xia Jiang <xia.jiang@mediatek.com>
 *
 */

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/clk.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
#include <media/media-device.h>
#include <media/videobuf2-core.h>
#include <media/videobuf2-dma-contig.h>
#include <media/videobuf2-v4l2.h>
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-device.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-event.h>

#include "mtk_jpeg_core.h"
#include "mtk_jpeg_enc_hw.h"

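/*
 * Mapping from a requested JPEG quality factor to the quality levels the
 * hardware supports. The table is sorted in ascending order;
 * mtk_jpeg_set_enc_params() picks the first entry whose quality_param is
 * greater than or equal to the requested quality.
 */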
static const struct mtk_jpeg_enc_qlt mtk_jpeg_enc_quality[] = {
	{.quality_param = 34, .hardware_value = JPEG_ENC_QUALITY_Q34},
	{.quality_param = 39, .hardware_value = JPEG_ENC_QUALITY_Q39},
	{.quality_param = 48, .hardware_value = JPEG_ENC_QUALITY_Q48},
	{.quality_param = 60, .hardware_value = JPEG_ENC_QUALITY_Q60},
	{.quality_param = 64, .hardware_value = JPEG_ENC_QUALITY_Q64},
	{.quality_param = 68, .hardware_value = JPEG_ENC_QUALITY_Q68},
	{.quality_param = 74, .hardware_value = JPEG_ENC_QUALITY_Q74},
	{.quality_param = 80, .hardware_value = JPEG_ENC_QUALITY_Q80},
	{.quality_param = 82, .hardware_value = JPEG_ENC_QUALITY_Q82},
	{.quality_param = 84, .hardware_value = JPEG_ENC_QUALITY_Q84},
	{.quality_param = 87, .hardware_value = JPEG_ENC_QUALITY_Q87},
	{.quality_param = 90, .hardware_value = JPEG_ENC_QUALITY_Q90},
	{.quality_param = 92, .hardware_value = JPEG_ENC_QUALITY_Q92},
	{.quality_param = 95, .hardware_value = JPEG_ENC_QUALITY_Q95},
	{.quality_param = 97, .hardware_value = JPEG_ENC_QUALITY_Q97},
};

static const struct of_device_id mtk_jpegenc_drv_ids[] = {
	{
		.compatible = "mediatek,mt8195-jpgenc-hw",
	},
	{},
};
MODULE_DEVICE_TABLE(of, mtk_jpegenc_drv_ids);

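/*
 * Reset the encoder by toggling JPEG_ENC_RSTB and clearing the codec
 * selection register, leaving the hardware ready for the next job.
 */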
void mtk_jpeg_enc_reset(void __iomem *base)
{
	writel(0, base + JPEG_ENC_RSTB);
	writel(JPEG_ENC_RESET_BIT, base + JPEG_ENC_RSTB);
	writel(0, base + JPEG_ENC_CODEC_SEL);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_reset);

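/*
 * Return the number of bytes the encoder wrote to the destination buffer.
 * On hardware with 34-bit addressing the DMA address register holds the
 * address shifted right by two, so shift it back before subtracting the
 * destination base address.
 */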
u32 mtk_jpeg_enc_get_file_size(void __iomem *base, bool support_34bit)
{
	return (readl(base + JPEG_ENC_DMA_ADDR0) << ((support_34bit) ? 2 : 0)) -
	       readl(base + JPEG_ENC_DST_ADDR0);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_get_file_size);

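/*
 * Kick off encoding: enable the encoder interrupt and set the enable bit
 * in the control register.
 */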
void mtk_jpeg_enc_start(void __iomem *base)
{
	u32 value;

	value = readl(base + JPEG_ENC_CTRL);
	value |= JPEG_ENC_CTRL_INT_EN_BIT | JPEG_ENC_CTRL_ENABLE_BIT;
	writel(value, base + JPEG_ENC_CTRL);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_enc_start);

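/*
 * Program the source (raw image) DMA addresses: plane 0 is luma, plane 1
 * is chroma. On 34-bit capable hardware the upper address bits go into the
 * corresponding address-extension registers.
 */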
void mtk_jpeg_set_enc_src(struct mtk_jpeg_ctx *ctx, void __iomem *base,
			  struct vb2_buffer *src_buf)
{
	int i;
	dma_addr_t dma_addr;
	u32 addr_ext;
	bool support_34bit = ctx->jpeg->variant->support_34bit;

	for (i = 0; i < src_buf->num_planes; i++) {
		dma_addr = vb2_dma_contig_plane_dma_addr(src_buf, i) +
			   src_buf->planes[i].data_offset;
		if (i == 0)
			writel(lower_32_bits(dma_addr), base + JPEG_ENC_SRC_LUMA_ADDR);
		else
			writel(lower_32_bits(dma_addr), base + JPEG_ENC_SRC_CHROMA_ADDR);

		if (support_34bit) {
			addr_ext = FIELD_PREP(MTK_JPEG_ADDR_MASK, upper_32_bits(dma_addr));
			if (i == 0)
				writel(addr_ext, base + JPEG_ENC_SRC_LUMA_ADDR_EXT);
			else
				writel(addr_ext, base + JPEG_ENC_SRC_CHRO_ADDR_EXT);
		}
	}
}
EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_src);

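/*
 * Program the destination (bitstream) DMA addresses: the 16-byte aligned
 * base and stall (end) addresses, the byte-offset mask for the unaligned
 * part, and an extra header offset when EXIF output is enabled. The
 * address-extension registers carry the upper bits on 34-bit hardware.
 */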
void mtk_jpeg_set_enc_dst(struct mtk_jpeg_ctx *ctx, void __iomem *base,
			  struct vb2_buffer *dst_buf)
{
	dma_addr_t dma_addr;
	size_t size;
	u32 dma_addr_offset;
	u32 dma_addr_offsetmask;
	u32 addr_ext;
	bool support_34bit = ctx->jpeg->variant->support_34bit;

	dma_addr = vb2_dma_contig_plane_dma_addr(dst_buf, 0);
	dma_addr_offset = ctx->enable_exif ? MTK_JPEG_MAX_EXIF_SIZE : 0;
	dma_addr_offsetmask = dma_addr & JPEG_ENC_DST_ADDR_OFFSET_MASK;
	size = vb2_plane_size(dst_buf, 0);

	writel(dma_addr_offset & ~0xf, base + JPEG_ENC_OFFSET_ADDR);
	writel(dma_addr_offsetmask & 0xf, base + JPEG_ENC_BYTE_OFFSET_MASK);
	writel(dma_addr & ~0xf, base + JPEG_ENC_DST_ADDR0);
	writel((dma_addr + size) & ~0xf, base + JPEG_ENC_STALL_ADDR0);

	if (support_34bit) {
		addr_ext = FIELD_PREP(MTK_JPEG_ADDR_MASK, upper_32_bits(dma_addr));
		writel(addr_ext, base + JPEG_ENC_DEST_ADDR0_EXT);
		writel(addr_ext + size, base + JPEG_ENC_STALL_ADDR0_EXT);
	}
}
EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_dst);

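/*
 * Program the per-frame encoding parameters: image size, number of 8x8
 * blocks, image and memory strides, the quality level looked up from
 * mtk_jpeg_enc_quality[], the YUV format bits, and the EXIF and restart
 * interval settings.
 */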
void mtk_jpeg_set_enc_params(struct mtk_jpeg_ctx *ctx, void __iomem *base)
{
	u32 value;
	u32 width = ctx->out_q.enc_crop_rect.width;
	u32 height = ctx->out_q.enc_crop_rect.height;
	u32 enc_format = ctx->out_q.fmt->fourcc;
	u32 bytesperline = ctx->out_q.pix_mp.plane_fmt[0].bytesperline;
	u32 blk_num;
	u32 img_stride;
	u32 mem_stride;
	u32 i, enc_quality;
	u32 nr_enc_quality = ARRAY_SIZE(mtk_jpeg_enc_quality);

	value = width << 16 | height;
	writel(value, base + JPEG_ENC_IMG_SIZE);

	if (enc_format == V4L2_PIX_FMT_NV12M ||
	    enc_format == V4L2_PIX_FMT_NV21M)
		/*
		 * Total 8 x 8 block number of luma and chroma.
		 * The number of blocks is counted from 0.
		 */
		blk_num = DIV_ROUND_UP(width, 16) *
			  DIV_ROUND_UP(height, 16) * 6 - 1;
	else
		blk_num = DIV_ROUND_UP(width, 16) *
			  DIV_ROUND_UP(height, 8) * 4 - 1;
	writel(blk_num, base + JPEG_ENC_BLK_NUM);

	if (enc_format == V4L2_PIX_FMT_NV12M ||
	    enc_format == V4L2_PIX_FMT_NV21M) {
		/* 4:2:0 */
		img_stride = round_up(width, 16);
		mem_stride = bytesperline;
	} else {
		/* 4:2:2 */
		img_stride = round_up(width * 2, 32);
		mem_stride = img_stride;
	}
	writel(img_stride, base + JPEG_ENC_IMG_STRIDE);
	writel(mem_stride, base + JPEG_ENC_STRIDE);

	enc_quality = mtk_jpeg_enc_quality[nr_enc_quality - 1].hardware_value;
	for (i = 0; i < nr_enc_quality; i++) {
		if (ctx->enc_quality <= mtk_jpeg_enc_quality[i].quality_param) {
			enc_quality = mtk_jpeg_enc_quality[i].hardware_value;
			break;
		}
	}
	writel(enc_quality, base + JPEG_ENC_QUALITY);

	value = readl(base + JPEG_ENC_CTRL);
	value &= ~JPEG_ENC_CTRL_YUV_FORMAT_MASK;
	value |= (ctx->out_q.fmt->hw_format & 3) << 3;
	if (ctx->enable_exif)
		value |= JPEG_ENC_CTRL_FILE_FORMAT_BIT;
	else
		value &= ~JPEG_ENC_CTRL_FILE_FORMAT_BIT;
	if (ctx->restart_interval)
		value |= JPEG_ENC_CTRL_RESTART_EN_BIT;
	else
		value &= ~JPEG_ENC_CTRL_RESTART_EN_BIT;
	writel(value, base + JPEG_ENC_CTRL);

	writel(ctx->restart_interval, base + JPEG_ENC_RST_MCU_NUM);
}
EXPORT_SYMBOL_GPL(mtk_jpeg_set_enc_params);

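/*
 * Add the completed capture buffer to the context's done queue, then walk
 * the queue and return buffers whose frame number matches the next expected
 * one, so completions are delivered to the m2m framework in frame order.
 */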
static void mtk_jpegenc_put_buf(struct mtk_jpegenc_comp_dev *jpeg)
{
	struct mtk_jpeg_ctx *ctx;
	struct vb2_v4l2_buffer *dst_buffer;
	struct list_head *temp_entry;
	struct list_head *pos = NULL;
	struct mtk_jpeg_src_buf *dst_done_buf, *tmp_dst_done_buf;
	unsigned long flags;

	ctx = jpeg->hw_param.curr_ctx;
	if (!ctx) {
		dev_err(jpeg->dev, "comp_jpeg ctx fail !!!\n");
		return;
	}

	dst_buffer = jpeg->hw_param.dst_buffer;
	if (!dst_buffer) {
		dev_err(jpeg->dev, "comp_jpeg dst_buffer fail !!!\n");
		return;
	}

	dst_done_buf = container_of(dst_buffer,
				    struct mtk_jpeg_src_buf, b);

	spin_lock_irqsave(&ctx->done_queue_lock, flags);
	list_add_tail(&dst_done_buf->list, &ctx->dst_done_queue);
	while (!list_empty(&ctx->dst_done_queue) &&
	       (pos != &ctx->dst_done_queue)) {
		list_for_each_prev_safe(pos, temp_entry, &ctx->dst_done_queue) {
			tmp_dst_done_buf = list_entry(pos,
						      struct mtk_jpeg_src_buf,
						      list);
			if (tmp_dst_done_buf->frame_num ==
				ctx->last_done_frame_num) {
				list_del(&tmp_dst_done_buf->list);
				v4l2_m2m_buf_done(&tmp_dst_done_buf->b,
						  VB2_BUF_STATE_DONE);
				ctx->last_done_frame_num++;
			}
		}
	}
	spin_unlock_irqrestore(&ctx->done_queue_lock, flags);
}

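/*
 * Delayed-work handler run when the hardware does not signal completion in
 * time: reset the encoder, drop the clock and runtime PM references, mark
 * the core idle again and return the source buffer with an error state.
 */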
static void mtk_jpegenc_timeout_work(struct work_struct *work)
{
	struct delayed_work *dly_work = to_delayed_work(work);
	struct mtk_jpegenc_comp_dev *cjpeg =
		container_of(dly_work,
			     struct mtk_jpegenc_comp_dev,
			     job_timeout_work);
	struct mtk_jpeg_dev *master_jpeg = cjpeg->master_dev;
	enum vb2_buffer_state buf_state = VB2_BUF_STATE_ERROR;
	struct vb2_v4l2_buffer *src_buf, *dst_buf;

	src_buf = cjpeg->hw_param.src_buffer;
	dst_buf = cjpeg->hw_param.dst_buffer;
	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, true);

	mtk_jpeg_enc_reset(cjpeg->reg_base);
	clk_disable_unprepare(cjpeg->venc_clk.clks->clk);
	pm_runtime_put(cjpeg->dev);
	cjpeg->hw_state = MTK_JPEG_HW_IDLE;
	atomic_inc(&master_jpeg->hw_rdy);
	wake_up(&master_jpeg->hw_wq);
	v4l2_m2m_buf_done(src_buf, buf_state);
	mtk_jpegenc_put_buf(cjpeg);
}

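/*
 * Interrupt handler for one encoder core: acknowledge the interrupt, read
 * back the encoded file size, mark the source and destination buffers done,
 * release the clock and runtime PM references, and signal that the core is
 * idle again.
 */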
static irqreturn_t mtk_jpegenc_hw_irq_handler(int irq, void *priv)
{
	struct vb2_v4l2_buffer *src_buf, *dst_buf;
	enum vb2_buffer_state buf_state;
	struct mtk_jpeg_ctx *ctx;
	u32 result_size;
	u32 irq_status;

	struct mtk_jpegenc_comp_dev *jpeg = priv;
	struct mtk_jpeg_dev *master_jpeg = jpeg->master_dev;

	cancel_delayed_work(&jpeg->job_timeout_work);

	ctx = jpeg->hw_param.curr_ctx;
	src_buf = jpeg->hw_param.src_buffer;
	dst_buf = jpeg->hw_param.dst_buffer;
	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, true);

	irq_status = readl(jpeg->reg_base + JPEG_ENC_INT_STS) &
		JPEG_ENC_INT_STATUS_MASK_ALLIRQ;
	if (irq_status)
		writel(0, jpeg->reg_base + JPEG_ENC_INT_STS);
	if (!(irq_status & JPEG_ENC_INT_STATUS_DONE))
		dev_warn(jpeg->dev, "Jpg Enc occurs unknown Err.");

	result_size = mtk_jpeg_enc_get_file_size(jpeg->reg_base,
						 ctx->jpeg->variant->support_34bit);
	vb2_set_plane_payload(&dst_buf->vb2_buf, 0, result_size);
	buf_state = VB2_BUF_STATE_DONE;
	v4l2_m2m_buf_done(src_buf, buf_state);
	mtk_jpegenc_put_buf(jpeg);
	pm_runtime_put(ctx->jpeg->dev);
	clk_disable_unprepare(jpeg->venc_clk.clks->clk);

	jpeg->hw_state = MTK_JPEG_HW_IDLE;
	wake_up(&master_jpeg->hw_wq);
	atomic_inc(&master_jpeg->hw_rdy);

	return IRQ_HANDLED;
}

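/*
 * Look up the encoder core's interrupt from the platform device and request
 * it with mtk_jpegenc_hw_irq_handler() as the handler.
 */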
static int mtk_jpegenc_hw_init_irq(struct mtk_jpegenc_comp_dev *dev)
{
	struct platform_device *pdev = dev->plat_dev;
	int ret;

	dev->jpegenc_irq = platform_get_irq(pdev, 0);
	if (dev->jpegenc_irq < 0)
		return dev->jpegenc_irq;

	ret = devm_request_irq(&pdev->dev,
			       dev->jpegenc_irq,
			       mtk_jpegenc_hw_irq_handler,
			       0,
			       pdev->name, dev);
	if (ret) {
		dev_err(&pdev->dev, "Failed to devm_request_irq %d (%d)",
			dev->jpegenc_irq, ret);
		return ret;
	}

	return 0;
}

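/*
 * Probe one encoder core: defer until the master JPEG device has probed,
 * then map registers, fetch clocks, request the interrupt and register this
 * core with the master device before enabling runtime PM.
 */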
static int mtk_jpegenc_hw_probe(struct platform_device *pdev)
{
	struct mtk_jpegenc_clk *jpegenc_clk;
	struct mtk_jpeg_dev *master_dev;
	struct mtk_jpegenc_comp_dev *dev;
	int ret, i;

	struct device *decs = &pdev->dev;

	if (!decs->parent)
		return -EPROBE_DEFER;

	master_dev = dev_get_drvdata(decs->parent);
	if (!master_dev)
		return -EPROBE_DEFER;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	dev->plat_dev = pdev;
	dev->dev = &pdev->dev;

	spin_lock_init(&dev->hw_lock);
	dev->hw_state = MTK_JPEG_HW_IDLE;

	INIT_DELAYED_WORK(&dev->job_timeout_work,
			  mtk_jpegenc_timeout_work);

	jpegenc_clk = &dev->venc_clk;

	jpegenc_clk->clk_num = devm_clk_bulk_get_all(&pdev->dev,
						     &jpegenc_clk->clks);
	if (jpegenc_clk->clk_num < 0)
		return dev_err_probe(&pdev->dev, jpegenc_clk->clk_num,
				     "Failed to get jpegenc clock count\n");

	dev->reg_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(dev->reg_base))
		return PTR_ERR(dev->reg_base);

	ret = mtk_jpegenc_hw_init_irq(dev);
	if (ret)
		return ret;

	i = atomic_add_return(1, &master_dev->hw_index) - 1;
	master_dev->enc_hw_dev[i] = dev;
	master_dev->reg_encbase[i] = dev->reg_base;
	dev->master_dev = master_dev;

	platform_set_drvdata(pdev, dev);
	pm_runtime_enable(&pdev->dev);

	return 0;
}

static struct platform_driver mtk_jpegenc_hw_driver = {
	.probe = mtk_jpegenc_hw_probe,
	.driver = {
		.name = "mtk-jpegenc-hw",
		.of_match_table = mtk_jpegenc_drv_ids,
	},
};

module_platform_driver(mtk_jpegenc_hw_driver);

MODULE_DESCRIPTION("MediaTek JPEG encode HW driver");
MODULE_LICENSE("GPL");