xref: /linux/drivers/media/platform/verisilicon/hantro_g2_hevc_dec.c (revision d7aa60d966461ca6114dc348e97889dc8850ff7f)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Hantro VPU HEVC codec driver
4  *
5  * Copyright (C) 2020 Safran Passenger Innovations LLC
6  */
7 
8 #include "hantro_hw.h"
9 #include "hantro_g2_regs.h"
10 
/*
 * Fill the tile_sizes auxiliary buffer with the (width, height) of every
 * tile, expressed in CTB units, and program the tile-related registers.
 * When tiles are disabled a single tile covering the whole picture is
 * written instead.
 */
static void prepare_tile_info_buffer(struct hantro_ctx *ctx)
{
	struct hantro_dev *vpu = ctx->dev;
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
	/* Buffer layout: consecutive u16 pairs, (width, height) per tile. */
	u16 *p = (u16 *)((u8 *)ctx->hevc_dec.tile_sizes.cpu);
	unsigned int num_tile_rows = pps->num_tile_rows_minus1 + 1;
	unsigned int num_tile_cols = pps->num_tile_columns_minus1 + 1;
	unsigned int pic_width_in_ctbs, pic_height_in_ctbs;
	unsigned int max_log2_ctb_size, ctb_size;
	bool tiles_enabled, uniform_spacing;
	u32 no_chroma = 0;

	tiles_enabled = !!(pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED);
	uniform_spacing = !!(pps->flags & V4L2_HEVC_PPS_FLAG_UNIFORM_SPACING);

	hantro_reg_write(vpu, &g2_tile_e, tiles_enabled);

	/* CTB size and picture dimensions in CTB units (rounded up). */
	max_log2_ctb_size = sps->log2_min_luma_coding_block_size_minus3 + 3 +
			    sps->log2_diff_max_min_luma_coding_block_size;
	pic_width_in_ctbs = (sps->pic_width_in_luma_samples +
			    (1 << max_log2_ctb_size) - 1) >> max_log2_ctb_size;
	pic_height_in_ctbs = (sps->pic_height_in_luma_samples + (1 << max_log2_ctb_size) - 1)
			     >> max_log2_ctb_size;
	ctb_size = 1 << max_log2_ctb_size;

	vpu_debug(1, "Preparing tile sizes buffer for %dx%d CTBs (CTB size %d)\n",
		  pic_width_in_ctbs, pic_height_in_ctbs, ctb_size);

	if (tiles_enabled) {
		unsigned int i, j, h;

		vpu_debug(1, "Tiles enabled! %dx%d\n", num_tile_cols, num_tile_rows);

		hantro_reg_write(vpu, &g2_num_tile_rows, num_tile_rows);
		hantro_reg_write(vpu, &g2_num_tile_cols, num_tile_cols);

		/* write width + height for each tile in pic */
		if (!uniform_spacing) {
			/* Explicit per-tile sizes from the PPS. */
			u32 tmp_w = 0, tmp_h = 0;

			for (i = 0; i < num_tile_rows; i++) {
				/* Last row takes whatever CTB rows remain. */
				if (i == num_tile_rows - 1)
					h = pic_height_in_ctbs - tmp_h;
				else
					h = pps->row_height_minus1[i] + 1;
				tmp_h += h;
				/* Flag a 1-CTB-high first tile row with 16x16 CTBs. */
				if (i == 0 && h == 1 && ctb_size == 16)
					no_chroma = 1;
				for (j = 0, tmp_w = 0; j < num_tile_cols - 1; j++) {
					tmp_w += pps->column_width_minus1[j] + 1;
					*p++ = pps->column_width_minus1[j] + 1;
					*p++ = h;
					if (i == 0 && h == 1 && ctb_size == 16)
						no_chroma = 1;
				}
				/* last column */
				*p++ = pic_width_in_ctbs - tmp_w;
				*p++ = h;
			}
		} else { /* uniform spacing */
			u32 tmp, prev_h, prev_w;

			/*
			 * Uniform split: tile boundaries at
			 * (i + 1) * size / num_tiles, as in the HEVC spec.
			 */
			for (i = 0, prev_h = 0; i < num_tile_rows; i++) {
				tmp = (i + 1) * pic_height_in_ctbs / num_tile_rows;
				h = tmp - prev_h;
				prev_h = tmp;
				if (i == 0 && h == 1 && ctb_size == 16)
					no_chroma = 1;
				for (j = 0, prev_w = 0; j < num_tile_cols; j++) {
					tmp = (j + 1) * pic_width_in_ctbs / num_tile_cols;
					*p++ = tmp - prev_w;
					*p++ = h;
					if (j == 0 &&
					    (pps->column_width_minus1[0] + 1) == 1 &&
					    ctb_size == 16)
						no_chroma = 1;
					prev_w = tmp;
				}
			}
		}
	} else {
		hantro_reg_write(vpu, &g2_num_tile_rows, 1);
		hantro_reg_write(vpu, &g2_num_tile_cols, 1);

		/* There's one tile, with dimensions equal to pic size. */
		p[0] = pic_width_in_ctbs;
		p[1] = pic_height_in_ctbs;
	}

	if (no_chroma)
		vpu_debug(1, "%s: no chroma!\n", __func__);
}
105 
/*
 * Compute how many bits of the slice segment header the hardware must
 * skip before it starts parsing on its own. The result is programmed
 * into g2_hdr_skip_length by set_params().
 */
static int compute_header_skip_length(struct hantro_ctx *ctx)
{
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	int skip = 0;

	if (pps->flags & V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT)
		/* size of pic_output_flag */
		skip++;

	if (sps->flags & V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE)
		/* size of colour_plane_id, u(2) */
		skip += 2;

	if (!(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IDR_PIC)) {
		/* size of slice_pic_order_cnt_lsb */
		skip += sps->log2_max_pic_order_cnt_lsb_minus4 + 4;

		/* size of short_term_ref_pic_set_sps_flag */
		skip++;

		if (decode_params->short_term_ref_pic_set_size)
			/* size of st_ref_pic_set( num_short_term_ref_pic_sets ) */
			skip += decode_params->short_term_ref_pic_set_size;
		else if (sps->num_short_term_ref_pic_sets > 1)
			/* size of short_term_ref_pic_set_idx: ceil(log2(num sets)) */
			skip += fls(sps->num_short_term_ref_pic_sets - 1);

		skip += decode_params->long_term_ref_pic_set_size;
	}

	return skip;
}
140 
set_params(struct hantro_ctx * ctx)141 static void set_params(struct hantro_ctx *ctx)
142 {
143 	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
144 	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
145 	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
146 	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
147 	struct hantro_dev *vpu = ctx->dev;
148 	u32 min_log2_cb_size, max_log2_ctb_size, min_cb_size, max_ctb_size;
149 	u32 pic_width_in_min_cbs, pic_height_in_min_cbs;
150 	u32 pic_width_aligned, pic_height_aligned;
151 	u32 partial_ctb_x, partial_ctb_y;
152 
153 	hantro_reg_write(vpu, &g2_bit_depth_y_minus8, sps->bit_depth_luma_minus8);
154 	hantro_reg_write(vpu, &g2_bit_depth_c_minus8, sps->bit_depth_chroma_minus8);
155 
156 	hantro_reg_write(vpu, &g2_hdr_skip_length, compute_header_skip_length(ctx));
157 
158 	min_log2_cb_size = sps->log2_min_luma_coding_block_size_minus3 + 3;
159 	max_log2_ctb_size = min_log2_cb_size + sps->log2_diff_max_min_luma_coding_block_size;
160 
161 	hantro_reg_write(vpu, &g2_min_cb_size, min_log2_cb_size);
162 	hantro_reg_write(vpu, &g2_max_cb_size, max_log2_ctb_size);
163 
164 	min_cb_size = 1 << min_log2_cb_size;
165 	max_ctb_size = 1 << max_log2_ctb_size;
166 
167 	pic_width_in_min_cbs = sps->pic_width_in_luma_samples / min_cb_size;
168 	pic_height_in_min_cbs = sps->pic_height_in_luma_samples / min_cb_size;
169 	pic_width_aligned = ALIGN(sps->pic_width_in_luma_samples, max_ctb_size);
170 	pic_height_aligned = ALIGN(sps->pic_height_in_luma_samples, max_ctb_size);
171 
172 	partial_ctb_x = !!(sps->pic_width_in_luma_samples != pic_width_aligned);
173 	partial_ctb_y = !!(sps->pic_height_in_luma_samples != pic_height_aligned);
174 
175 	hantro_reg_write(vpu, &g2_partial_ctb_x, partial_ctb_x);
176 	hantro_reg_write(vpu, &g2_partial_ctb_y, partial_ctb_y);
177 
178 	hantro_reg_write(vpu, &g2_pic_width_in_cbs, pic_width_in_min_cbs);
179 	hantro_reg_write(vpu, &g2_pic_height_in_cbs, pic_height_in_min_cbs);
180 
181 	hantro_reg_write(vpu, &g2_pic_width_4x4,
182 			 (pic_width_in_min_cbs * min_cb_size) / 4);
183 	hantro_reg_write(vpu, &g2_pic_height_4x4,
184 			 (pic_height_in_min_cbs * min_cb_size) / 4);
185 
186 	hantro_reg_write(vpu, &hevc_max_inter_hierdepth,
187 			 sps->max_transform_hierarchy_depth_inter);
188 	hantro_reg_write(vpu, &hevc_max_intra_hierdepth,
189 			 sps->max_transform_hierarchy_depth_intra);
190 	hantro_reg_write(vpu, &hevc_min_trb_size,
191 			 sps->log2_min_luma_transform_block_size_minus2 + 2);
192 	hantro_reg_write(vpu, &hevc_max_trb_size,
193 			 sps->log2_min_luma_transform_block_size_minus2 + 2 +
194 			 sps->log2_diff_max_min_luma_transform_block_size);
195 
196 	hantro_reg_write(vpu, &g2_tempor_mvp_e,
197 			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED) &&
198 			 !(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IDR_PIC));
199 	hantro_reg_write(vpu, &g2_strong_smooth_e,
200 			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED));
201 	hantro_reg_write(vpu, &g2_asym_pred_e,
202 			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_AMP_ENABLED));
203 	hantro_reg_write(vpu, &g2_sao_e,
204 			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET));
205 	hantro_reg_write(vpu, &g2_sign_data_hide,
206 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED));
207 
208 	if (pps->flags & V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED) {
209 		hantro_reg_write(vpu, &g2_cu_qpd_e, 1);
210 		hantro_reg_write(vpu, &g2_max_cu_qpd_depth, pps->diff_cu_qp_delta_depth);
211 	} else {
212 		hantro_reg_write(vpu, &g2_cu_qpd_e, 0);
213 		hantro_reg_write(vpu, &g2_max_cu_qpd_depth, 0);
214 	}
215 
216 	hantro_reg_write(vpu, &g2_cb_qp_offset, pps->pps_cb_qp_offset);
217 	hantro_reg_write(vpu, &g2_cr_qp_offset, pps->pps_cr_qp_offset);
218 
219 	hantro_reg_write(vpu, &g2_filt_offset_beta, pps->pps_beta_offset_div2);
220 	hantro_reg_write(vpu, &g2_filt_offset_tc, pps->pps_tc_offset_div2);
221 	hantro_reg_write(vpu, &g2_slice_hdr_ext_e,
222 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT));
223 	hantro_reg_write(vpu, &g2_slice_hdr_ext_bits, pps->num_extra_slice_header_bits);
224 	hantro_reg_write(vpu, &g2_slice_chqp_present,
225 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT));
226 	hantro_reg_write(vpu, &g2_weight_bipr_idc,
227 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED));
228 	hantro_reg_write(vpu, &g2_transq_bypass,
229 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED));
230 	hantro_reg_write(vpu, &g2_list_mod_e,
231 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT));
232 	hantro_reg_write(vpu, &g2_entropy_sync_e,
233 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED));
234 	hantro_reg_write(vpu, &g2_cabac_init_present,
235 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT));
236 	hantro_reg_write(vpu, &g2_idr_pic_e,
237 			 !!(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IRAP_PIC));
238 	hantro_reg_write(vpu, &hevc_parallel_merge,
239 			 pps->log2_parallel_merge_level_minus2 + 2);
240 	hantro_reg_write(vpu, &g2_pcm_filt_d,
241 			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED));
242 	hantro_reg_write(vpu, &g2_pcm_e,
243 			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED));
244 	if (sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED) {
245 		hantro_reg_write(vpu, &g2_max_pcm_size,
246 				 sps->log2_diff_max_min_pcm_luma_coding_block_size +
247 				 sps->log2_min_pcm_luma_coding_block_size_minus3 + 3);
248 		hantro_reg_write(vpu, &g2_min_pcm_size,
249 				 sps->log2_min_pcm_luma_coding_block_size_minus3 + 3);
250 		hantro_reg_write(vpu, &g2_bit_depth_pcm_y,
251 				 sps->pcm_sample_bit_depth_luma_minus1 + 1);
252 		hantro_reg_write(vpu, &g2_bit_depth_pcm_c,
253 				 sps->pcm_sample_bit_depth_chroma_minus1 + 1);
254 	} else {
255 		hantro_reg_write(vpu, &g2_max_pcm_size, 0);
256 		hantro_reg_write(vpu, &g2_min_pcm_size, 0);
257 		hantro_reg_write(vpu, &g2_bit_depth_pcm_y, 0);
258 		hantro_reg_write(vpu, &g2_bit_depth_pcm_c, 0);
259 	}
260 
261 	hantro_reg_write(vpu, &g2_start_code_e, 1);
262 	hantro_reg_write(vpu, &g2_init_qp, pps->init_qp_minus26 + 26);
263 	hantro_reg_write(vpu, &g2_weight_pred_e,
264 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED));
265 	hantro_reg_write(vpu, &g2_cabac_init_present,
266 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT));
267 	hantro_reg_write(vpu, &g2_const_intra_e,
268 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED));
269 	hantro_reg_write(vpu, &g2_transform_skip,
270 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED));
271 	hantro_reg_write(vpu, &g2_out_filtering_dis,
272 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER));
273 	hantro_reg_write(vpu, &g2_filt_ctrl_pres,
274 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT));
275 	hantro_reg_write(vpu, &g2_dependent_slice,
276 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT_ENABLED));
277 	hantro_reg_write(vpu, &g2_filter_override,
278 			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED));
279 	hantro_reg_write(vpu, &g2_refidx0_active,
280 			 pps->num_ref_idx_l0_default_active_minus1 + 1);
281 	hantro_reg_write(vpu, &g2_refidx1_active,
282 			 pps->num_ref_idx_l1_default_active_minus1 + 1);
283 	hantro_reg_write(vpu, &g2_apf_threshold, 8);
284 }
285 
get_dpb_index(const struct v4l2_ctrl_hevc_decode_params * decode_params,const u32 index)286 static u32 get_dpb_index(const struct v4l2_ctrl_hevc_decode_params *decode_params,
287 			 const u32 index)
288 {
289 	if (index > decode_params->num_active_dpb_entries)
290 		return 0;
291 
292 	return index;
293 }
294 
/*
 * Build the initial L0/L1 reference picture lists from the POC arrays in
 * decode_params and program them into the rlist_f*/rlist_b* registers.
 * L0 orders short-term-before, short-term-after, long-term; L1 swaps the
 * first two groups. Unused slots are filled by repeating the list.
 */
static void set_ref_pic_list(struct hantro_ctx *ctx)
{
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	struct hantro_dev *vpu = ctx->dev;
	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
	u32 list0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX] = {};
	u32 list1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX] = {};
	/* One register per L0 list entry. */
	static const struct hantro_reg ref_pic_regs0[] = {
		hevc_rlist_f0,
		hevc_rlist_f1,
		hevc_rlist_f2,
		hevc_rlist_f3,
		hevc_rlist_f4,
		hevc_rlist_f5,
		hevc_rlist_f6,
		hevc_rlist_f7,
		hevc_rlist_f8,
		hevc_rlist_f9,
		hevc_rlist_f10,
		hevc_rlist_f11,
		hevc_rlist_f12,
		hevc_rlist_f13,
		hevc_rlist_f14,
		hevc_rlist_f15,
	};
	/* One register per L1 list entry. */
	static const struct hantro_reg ref_pic_regs1[] = {
		hevc_rlist_b0,
		hevc_rlist_b1,
		hevc_rlist_b2,
		hevc_rlist_b3,
		hevc_rlist_b4,
		hevc_rlist_b5,
		hevc_rlist_b6,
		hevc_rlist_b7,
		hevc_rlist_b8,
		hevc_rlist_b9,
		hevc_rlist_b10,
		hevc_rlist_b11,
		hevc_rlist_b12,
		hevc_rlist_b13,
		hevc_rlist_b14,
		hevc_rlist_b15,
	};
	unsigned int i, j;

	/* List 0 contains: short term before, short term after and long term */
	j = 0;
	for (i = 0; i < decode_params->num_poc_st_curr_before && j < ARRAY_SIZE(list0); i++)
		list0[j++] = decode_params->poc_st_curr_before[i];
	for (i = 0; i < decode_params->num_poc_st_curr_after && j < ARRAY_SIZE(list0); i++)
		list0[j++] = decode_params->poc_st_curr_after[i];
	for (i = 0; i < decode_params->num_poc_lt_curr && j < ARRAY_SIZE(list0); i++)
		list0[j++] = decode_params->poc_lt_curr[i];

	/* Fill the list, copying over and over */
	i = 0;
	while (j < ARRAY_SIZE(list0))
		list0[j++] = list0[i++];

	/* List 1: short term after, short term before, long term. */
	j = 0;
	for (i = 0; i < decode_params->num_poc_st_curr_after && j < ARRAY_SIZE(list1); i++)
		list1[j++] = decode_params->poc_st_curr_after[i];
	for (i = 0; i < decode_params->num_poc_st_curr_before && j < ARRAY_SIZE(list1); i++)
		list1[j++] = decode_params->poc_st_curr_before[i];
	for (i = 0; i < decode_params->num_poc_lt_curr && j < ARRAY_SIZE(list1); i++)
		list1[j++] = decode_params->poc_lt_curr[i];

	/* Repeat entries until list1 is full, like list0 above. */
	i = 0;
	while (j < ARRAY_SIZE(list1))
		list1[j++] = list1[i++];

	/* Write both lists, clamping out-of-range entries to slot 0. */
	for (i = 0; i < V4L2_HEVC_DPB_ENTRIES_NUM_MAX; i++) {
		hantro_reg_write(vpu, &ref_pic_regs0[i],
				 get_dpb_index(decode_params, list0[i]));
		hantro_reg_write(vpu, &ref_pic_regs1[i],
				 get_dpb_index(decode_params, list1[i]));
	}
}
373 
/*
 * Program the reference-picture state: POC differences, reference lists,
 * and the luma/chroma/MV/compression addresses of every DPB entry plus
 * the current (output) picture.
 *
 * Returns 0 on success, -ENOMEM if a reference or output buffer address
 * cannot be obtained, -EINVAL if the current picture cannot be added to
 * the internal reference bookkeeping.
 */
static int set_ref(struct hantro_ctx *ctx)
{
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
	const struct v4l2_hevc_dpb_entry *dpb = decode_params->dpb;
	dma_addr_t luma_addr, chroma_addr, mv_addr = 0;
	dma_addr_t compress_luma_addr, compress_chroma_addr = 0;
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	struct hantro_decoded_buffer *dst;
	/* Fixed offsets of the per-picture planes inside one buffer. */
	size_t cr_offset = hantro_g2_chroma_offset(ctx);
	size_t mv_offset = hantro_g2_motion_vectors_offset(ctx);
	size_t compress_luma_offset = hantro_g2_luma_compress_offset(ctx);
	size_t compress_chroma_offset = hantro_g2_chroma_compress_offset(ctx);
	u32 max_ref_frames;
	u16 dpb_longterm_e;
	static const struct hantro_reg cur_poc[] = {
		hevc_cur_poc_00,
		hevc_cur_poc_01,
		hevc_cur_poc_02,
		hevc_cur_poc_03,
		hevc_cur_poc_04,
		hevc_cur_poc_05,
		hevc_cur_poc_06,
		hevc_cur_poc_07,
		hevc_cur_poc_08,
		hevc_cur_poc_09,
		hevc_cur_poc_10,
		hevc_cur_poc_11,
		hevc_cur_poc_12,
		hevc_cur_poc_13,
		hevc_cur_poc_14,
		hevc_cur_poc_15,
	};
	unsigned int i;

	max_ref_frames = decode_params->num_poc_lt_curr +
		decode_params->num_poc_st_curr_before +
		decode_params->num_poc_st_curr_after;
	/*
	 * Set max_ref_frames to non-zero to avoid HW hang when decoding
	 * badly marked I-frames.
	 */
	max_ref_frames = max_ref_frames ? max_ref_frames : 1;
	hantro_reg_write(vpu, &g2_num_ref_frames, max_ref_frames);
	hantro_reg_write(vpu, &g2_filter_over_slices,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED));
	hantro_reg_write(vpu, &g2_filter_over_tiles,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED));

	/*
	 * Write POC count diff from current pic.
	 */
	for (i = 0; i < decode_params->num_active_dpb_entries && i < ARRAY_SIZE(cur_poc); i++) {
		/*
		 * NOTE(review): poc_diff may be negative; plain 'char' has
		 * implementation-defined signedness, though the same 8-bit
		 * pattern ends up in the register either way — consider s8.
		 */
		char poc_diff = decode_params->pic_order_cnt_val - dpb[i].pic_order_cnt_val;

		hantro_reg_write(vpu, &cur_poc[i], poc_diff);
	}

	if (i < ARRAY_SIZE(cur_poc)) {
		/*
		 * After the references, fill one entry pointing to itself,
		 * i.e. difference is zero.
		 */
		hantro_reg_write(vpu, &cur_poc[i], 0);
		i++;
	}

	/* Fill the rest with the current picture */
	for (; i < ARRAY_SIZE(cur_poc); i++)
		hantro_reg_write(vpu, &cur_poc[i], decode_params->pic_order_cnt_val);

	set_ref_pic_list(ctx);

	/* We will only keep the reference pictures that are still used */
	hantro_hevc_ref_init(ctx);

	/* Set up addresses of DPB buffers */
	dpb_longterm_e = 0;
	for (i = 0; i < decode_params->num_active_dpb_entries &&
	     i < (V4L2_HEVC_DPB_ENTRIES_NUM_MAX - 1); i++) {
		luma_addr = hantro_hevc_get_ref_buf(ctx, dpb[i].pic_order_cnt_val);
		if (!luma_addr)
			return -ENOMEM;

		/* All planes live at fixed offsets from the luma base. */
		chroma_addr = luma_addr + cr_offset;
		mv_addr = luma_addr + mv_offset;
		compress_luma_addr = luma_addr + compress_luma_offset;
		compress_chroma_addr = luma_addr + compress_chroma_offset;

		/* Long-term flags are packed MSB-first into a 16-bit mask. */
		if (dpb[i].flags & V4L2_HEVC_DPB_ENTRY_LONG_TERM_REFERENCE)
			dpb_longterm_e |= BIT(V4L2_HEVC_DPB_ENTRIES_NUM_MAX - 1 - i);

		hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), luma_addr);
		hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), chroma_addr);
		hantro_write_addr(vpu, G2_REF_MV_ADDR(i), mv_addr);
		hantro_write_addr(vpu, G2_REF_COMP_LUMA_ADDR(i), compress_luma_addr);
		hantro_write_addr(vpu, G2_REF_COMP_CHROMA_ADDR(i), compress_chroma_addr);
	}

	/* The current (destination) picture occupies the next DPB slot. */
	vb2_dst = hantro_get_dst_buf(ctx);
	dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);
	luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
	if (!luma_addr)
		return -ENOMEM;

	if (hantro_hevc_add_ref_buf(ctx, decode_params->pic_order_cnt_val, luma_addr))
		return -EINVAL;

	chroma_addr = luma_addr + cr_offset;
	mv_addr = luma_addr + mv_offset;
	compress_luma_addr = luma_addr + compress_luma_offset;
	compress_chroma_addr = luma_addr + compress_chroma_offset;

	/* i++ on the last write advances past the current picture's slot. */
	hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), luma_addr);
	hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), chroma_addr);
	hantro_write_addr(vpu, G2_REF_MV_ADDR(i), mv_addr);
	hantro_write_addr(vpu, G2_REF_COMP_LUMA_ADDR(i), compress_luma_addr);
	hantro_write_addr(vpu, G2_REF_COMP_CHROMA_ADDR(i++), compress_chroma_addr);

	hantro_write_addr(vpu, G2_OUT_LUMA_ADDR, luma_addr);
	hantro_write_addr(vpu, G2_OUT_CHROMA_ADDR, chroma_addr);
	hantro_write_addr(vpu, G2_OUT_MV_ADDR, mv_addr);
	hantro_write_addr(vpu, G2_OUT_COMP_LUMA_ADDR, compress_luma_addr);
	hantro_write_addr(vpu, G2_OUT_COMP_CHROMA_ADDR, compress_chroma_addr);

	/* Zero the remaining, unused DPB slots. */
	for (; i < V4L2_HEVC_DPB_ENTRIES_NUM_MAX; i++) {
		hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), 0);
		hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), 0);
		hantro_write_addr(vpu, G2_REF_MV_ADDR(i), 0);
		hantro_write_addr(vpu, G2_REF_COMP_LUMA_ADDR(i), 0);
		hantro_write_addr(vpu, G2_REF_COMP_CHROMA_ADDR(i), 0);
	}

	hantro_reg_write(vpu, &g2_refer_lterm_e, dpb_longterm_e);

	return 0;
}
513 
set_buffers(struct hantro_ctx * ctx)514 static void set_buffers(struct hantro_ctx *ctx)
515 {
516 	struct vb2_v4l2_buffer *src_buf;
517 	struct hantro_dev *vpu = ctx->dev;
518 	dma_addr_t src_dma;
519 	u32 src_len, src_buf_len;
520 
521 	src_buf = hantro_get_src_buf(ctx);
522 
523 	/* Source (stream) buffer. */
524 	src_dma = vb2_dma_contig_plane_dma_addr(&src_buf->vb2_buf, 0);
525 	src_len = vb2_get_plane_payload(&src_buf->vb2_buf, 0);
526 	src_buf_len = vb2_plane_size(&src_buf->vb2_buf, 0);
527 
528 	hantro_write_addr(vpu, G2_STREAM_ADDR, src_dma);
529 	hantro_reg_write(vpu, &g2_stream_len, src_len);
530 	hantro_reg_write(vpu, &g2_strm_buffer_len, src_buf_len);
531 	hantro_reg_write(vpu, &g2_strm_start_offset, 0);
532 	hantro_reg_write(vpu, &g2_start_bit, 0);
533 	hantro_reg_write(vpu, &g2_write_mvs_e, 1);
534 
535 	hantro_write_addr(vpu, G2_TILE_SIZES_ADDR, ctx->hevc_dec.tile_sizes.dma);
536 	hantro_write_addr(vpu, G2_TILE_FILTER_ADDR, ctx->hevc_dec.tile_filter.dma);
537 	hantro_write_addr(vpu, G2_TILE_SAO_ADDR, ctx->hevc_dec.tile_sao.dma);
538 	hantro_write_addr(vpu, G2_TILE_BSD_ADDR, ctx->hevc_dec.tile_bsd.dma);
539 }
540 
prepare_scaling_list_buffer(struct hantro_ctx * ctx)541 static void prepare_scaling_list_buffer(struct hantro_ctx *ctx)
542 {
543 	struct hantro_dev *vpu = ctx->dev;
544 	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
545 	const struct v4l2_ctrl_hevc_scaling_matrix *sc = ctrls->scaling;
546 	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
547 	u8 *p = ((u8 *)ctx->hevc_dec.scaling_lists.cpu);
548 	unsigned int scaling_list_enabled;
549 	unsigned int i, j, k;
550 
551 	scaling_list_enabled = !!(sps->flags & V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED);
552 	hantro_reg_write(vpu, &g2_scaling_list_e, scaling_list_enabled);
553 
554 	if (!scaling_list_enabled)
555 		return;
556 
557 	for (i = 0; i < ARRAY_SIZE(sc->scaling_list_dc_coef_16x16); i++)
558 		*p++ = sc->scaling_list_dc_coef_16x16[i];
559 
560 	for (i = 0; i < ARRAY_SIZE(sc->scaling_list_dc_coef_32x32); i++)
561 		*p++ = sc->scaling_list_dc_coef_32x32[i];
562 
563 	/* 128-bit boundary */
564 	p += 8;
565 
566 	/* write scaling lists column by column */
567 
568 	for (i = 0; i < 6; i++)
569 		for (j = 0; j < 4; j++)
570 			for (k = 0; k < 4; k++)
571 				*p++ = sc->scaling_list_4x4[i][4 * k + j];
572 
573 	for (i = 0; i < 6; i++)
574 		for (j = 0; j < 8; j++)
575 			for (k = 0; k < 8; k++)
576 				*p++ = sc->scaling_list_8x8[i][8 * k + j];
577 
578 	for (i = 0; i < 6; i++)
579 		for (j = 0; j < 8; j++)
580 			for (k = 0; k < 8; k++)
581 				*p++ = sc->scaling_list_16x16[i][8 * k + j];
582 
583 	for (i = 0; i < 2; i++)
584 		for (j = 0; j < 8; j++)
585 			for (k = 0; k < 8; k++)
586 				*p++ = sc->scaling_list_32x32[i][8 * k + j];
587 
588 	hantro_write_addr(vpu, G2_HEVC_SCALING_LIST_ADDR, ctx->hevc_dec.scaling_lists.dma);
589 }
590 
hantro_g2_hevc_dec_run(struct hantro_ctx * ctx)591 int hantro_g2_hevc_dec_run(struct hantro_ctx *ctx)
592 {
593 	struct hantro_dev *vpu = ctx->dev;
594 	int ret;
595 
596 	/* Prepare HEVC decoder context. */
597 	ret = hantro_hevc_dec_prepare_run(ctx);
598 	if (ret)
599 		return ret;
600 
601 	/* Configure hardware registers. */
602 	set_params(ctx);
603 
604 	/* set reference pictures */
605 	ret = set_ref(ctx);
606 	if (ret)
607 		return ret;
608 
609 	set_buffers(ctx);
610 	prepare_tile_info_buffer(ctx);
611 
612 	prepare_scaling_list_buffer(ctx);
613 
614 	hantro_end_prepare_run(ctx);
615 
616 	hantro_reg_write(vpu, &g2_mode, HEVC_DEC_MODE);
617 	hantro_reg_write(vpu, &g2_clk_gate_e, 1);
618 
619 	/* Don't disable output */
620 	hantro_reg_write(vpu, &g2_out_dis, 0);
621 
622 	hantro_reg_write(vpu, &g2_ref_compress_bypass, !ctx->hevc_dec.use_compression);
623 
624 	/* Bus width and max burst */
625 	hantro_reg_write(vpu, &g2_buswidth, BUS_WIDTH_128);
626 	hantro_reg_write(vpu, &g2_max_burst, 16);
627 
628 	/* Swap */
629 	hantro_reg_write(vpu, &g2_strm_swap, 0xf);
630 	hantro_reg_write(vpu, &g2_dirmv_swap, 0xf);
631 	hantro_reg_write(vpu, &g2_compress_swap, 0xf);
632 
633 	/* Start decoding! */
634 	vdpu_write(vpu, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);
635 
636 	return 0;
637 }
638