// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VPU HEVC codec driver
 *
 * Copyright (C) 2020 Safran Passenger Innovations LLC
 */

#include "hantro_hw.h"
#include "hantro_g2_regs.h"

#define G2_ALIGN	16

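/* Byte offset of the chroma plane within a decoded buffer. */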
static size_t hantro_hevc_chroma_offset(struct hantro_ctx *ctx)
{
	return ctx->dst_fmt.width * ctx->dst_fmt.height;
}

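/*
 * Byte offset of the motion vector storage within a decoded buffer,
 * placed after the luma and chroma planes and aligned to G2_ALIGN.
 */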
static size_t hantro_hevc_motion_vectors_offset(struct hantro_ctx *ctx)
{
	size_t cr_offset = hantro_hevc_chroma_offset(ctx);

	return ALIGN((cr_offset * 3) / 2, G2_ALIGN);
}

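/*
 * Fill the tile size buffer with the width and height, in CTBs, of each
 * tile and program the tile-related registers. Without tiles the whole
 * picture is described as a single tile.
 */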
static void prepare_tile_info_buffer(struct hantro_ctx *ctx)
{
	struct hantro_dev *vpu = ctx->dev;
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
	u16 *p = (u16 *)((u8 *)ctx->hevc_dec.tile_sizes.cpu);
	unsigned int num_tile_rows = pps->num_tile_rows_minus1 + 1;
	unsigned int num_tile_cols = pps->num_tile_columns_minus1 + 1;
	unsigned int pic_width_in_ctbs, pic_height_in_ctbs;
	unsigned int max_log2_ctb_size, ctb_size;
	bool tiles_enabled, uniform_spacing;
	u32 no_chroma = 0;

	tiles_enabled = !!(pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED);
	uniform_spacing = !!(pps->flags & V4L2_HEVC_PPS_FLAG_UNIFORM_SPACING);

	hantro_reg_write(vpu, &g2_tile_e, tiles_enabled);

	max_log2_ctb_size = sps->log2_min_luma_coding_block_size_minus3 + 3 +
			    sps->log2_diff_max_min_luma_coding_block_size;
	pic_width_in_ctbs = (sps->pic_width_in_luma_samples +
			    (1 << max_log2_ctb_size) - 1) >> max_log2_ctb_size;
	pic_height_in_ctbs = (sps->pic_height_in_luma_samples + (1 << max_log2_ctb_size) - 1)
			     >> max_log2_ctb_size;
	ctb_size = 1 << max_log2_ctb_size;

	vpu_debug(1, "Preparing tile sizes buffer for %dx%d CTBs (CTB size %d)\n",
		  pic_width_in_ctbs, pic_height_in_ctbs, ctb_size);

	if (tiles_enabled) {
		unsigned int i, j, h;

		vpu_debug(1, "Tiles enabled! %dx%d\n", num_tile_cols, num_tile_rows);

		hantro_reg_write(vpu, &g2_num_tile_rows, num_tile_rows);
		hantro_reg_write(vpu, &g2_num_tile_cols, num_tile_cols);

		/* write width + height for each tile in pic */
		if (!uniform_spacing) {
			u32 tmp_w = 0, tmp_h = 0;

			for (i = 0; i < num_tile_rows; i++) {
				if (i == num_tile_rows - 1)
					h = pic_height_in_ctbs - tmp_h;
				else
					h = pps->row_height_minus1[i] + 1;
				tmp_h += h;
				if (i == 0 && h == 1 && ctb_size == 16)
					no_chroma = 1;
				for (j = 0, tmp_w = 0; j < num_tile_cols - 1; j++) {
					tmp_w += pps->column_width_minus1[j] + 1;
					*p++ = pps->column_width_minus1[j] + 1;
					*p++ = h;
					if (i == 0 && h == 1 && ctb_size == 16)
						no_chroma = 1;
				}
				/* last column */
				*p++ = pic_width_in_ctbs - tmp_w;
				*p++ = h;
			}
		} else { /* uniform spacing */
			u32 tmp, prev_h, prev_w;

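			/*
			 * Uniformly spaced tiles: boundary i sits at
			 * (i * size / num_tiles) CTBs, so each tile size is
			 * the difference between consecutive boundaries.
			 */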
			for (i = 0, prev_h = 0; i < num_tile_rows; i++) {
				tmp = (i + 1) * pic_height_in_ctbs / num_tile_rows;
				h = tmp - prev_h;
				prev_h = tmp;
				if (i == 0 && h == 1 && ctb_size == 16)
					no_chroma = 1;
				for (j = 0, prev_w = 0; j < num_tile_cols; j++) {
					tmp = (j + 1) * pic_width_in_ctbs / num_tile_cols;
					*p++ = tmp - prev_w;
					*p++ = h;
					if (j == 0 &&
					    (pps->column_width_minus1[0] + 1) == 1 &&
					    ctb_size == 16)
						no_chroma = 1;
					prev_w = tmp;
				}
			}
		}
	} else {
		hantro_reg_write(vpu, &g2_num_tile_rows, 1);
		hantro_reg_write(vpu, &g2_num_tile_cols, 1);

		/* There's one tile, with dimensions equal to pic size. */
		p[0] = pic_width_in_ctbs;
		p[1] = pic_height_in_ctbs;
	}

	if (no_chroma)
		vpu_debug(1, "%s: no chroma!\n", __func__);
}

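/* Program the SPS, PPS and decode parameter fields into the G2 registers. */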
static void set_params(struct hantro_ctx *ctx)
{
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
	struct hantro_dev *vpu = ctx->dev;
	u32 min_log2_cb_size, max_log2_ctb_size, min_cb_size, max_ctb_size;
	u32 pic_width_in_min_cbs, pic_height_in_min_cbs;
	u32 pic_width_aligned, pic_height_aligned;
	u32 partial_ctb_x, partial_ctb_y;

	hantro_reg_write(vpu, &g2_bit_depth_y_minus8, sps->bit_depth_luma_minus8);
	hantro_reg_write(vpu, &g2_bit_depth_c_minus8, sps->bit_depth_chroma_minus8);

	hantro_reg_write(vpu, &g2_output_8_bits, 0);

	hantro_reg_write(vpu, &g2_hdr_skip_length, ctrls->hevc_hdr_skip_length);

	min_log2_cb_size = sps->log2_min_luma_coding_block_size_minus3 + 3;
	max_log2_ctb_size = min_log2_cb_size + sps->log2_diff_max_min_luma_coding_block_size;

	hantro_reg_write(vpu, &g2_min_cb_size, min_log2_cb_size);
	hantro_reg_write(vpu, &g2_max_cb_size, max_log2_ctb_size);

	min_cb_size = 1 << min_log2_cb_size;
	max_ctb_size = 1 << max_log2_ctb_size;

	pic_width_in_min_cbs = sps->pic_width_in_luma_samples / min_cb_size;
	pic_height_in_min_cbs = sps->pic_height_in_luma_samples / min_cb_size;
	pic_width_aligned = ALIGN(sps->pic_width_in_luma_samples, max_ctb_size);
	pic_height_aligned = ALIGN(sps->pic_height_in_luma_samples, max_ctb_size);

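	/* Flag dimensions that are not an integer multiple of the CTB size. */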
	partial_ctb_x = !!(sps->pic_width_in_luma_samples != pic_width_aligned);
	partial_ctb_y = !!(sps->pic_height_in_luma_samples != pic_height_aligned);

	hantro_reg_write(vpu, &g2_partial_ctb_x, partial_ctb_x);
	hantro_reg_write(vpu, &g2_partial_ctb_y, partial_ctb_y);

	hantro_reg_write(vpu, &g2_pic_width_in_cbs, pic_width_in_min_cbs);
	hantro_reg_write(vpu, &g2_pic_height_in_cbs, pic_height_in_min_cbs);

	hantro_reg_write(vpu, &g2_pic_width_4x4,
			 (pic_width_in_min_cbs * min_cb_size) / 4);
	hantro_reg_write(vpu, &g2_pic_height_4x4,
			 (pic_height_in_min_cbs * min_cb_size) / 4);

	hantro_reg_write(vpu, &hevc_max_inter_hierdepth,
			 sps->max_transform_hierarchy_depth_inter);
	hantro_reg_write(vpu, &hevc_max_intra_hierdepth,
			 sps->max_transform_hierarchy_depth_intra);
	hantro_reg_write(vpu, &hevc_min_trb_size,
			 sps->log2_min_luma_transform_block_size_minus2 + 2);
	hantro_reg_write(vpu, &hevc_max_trb_size,
			 sps->log2_min_luma_transform_block_size_minus2 + 2 +
			 sps->log2_diff_max_min_luma_transform_block_size);

	hantro_reg_write(vpu, &g2_tempor_mvp_e,
			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED) &&
			 !(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IDR_PIC));
	hantro_reg_write(vpu, &g2_strong_smooth_e,
			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED));
	hantro_reg_write(vpu, &g2_asym_pred_e,
			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_AMP_ENABLED));
	hantro_reg_write(vpu, &g2_sao_e,
			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET));
	hantro_reg_write(vpu, &g2_sign_data_hide,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED));

	if (pps->flags & V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED) {
		hantro_reg_write(vpu, &g2_cu_qpd_e, 1);
		hantro_reg_write(vpu, &g2_max_cu_qpd_depth, pps->diff_cu_qp_delta_depth);
	} else {
		hantro_reg_write(vpu, &g2_cu_qpd_e, 0);
		hantro_reg_write(vpu, &g2_max_cu_qpd_depth, 0);
	}

	hantro_reg_write(vpu, &g2_cb_qp_offset, pps->pps_cb_qp_offset);
	hantro_reg_write(vpu, &g2_cr_qp_offset, pps->pps_cr_qp_offset);

	hantro_reg_write(vpu, &g2_filt_offset_beta, pps->pps_beta_offset_div2);
	hantro_reg_write(vpu, &g2_filt_offset_tc, pps->pps_tc_offset_div2);
	hantro_reg_write(vpu, &g2_slice_hdr_ext_e,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT));
	hantro_reg_write(vpu, &g2_slice_hdr_ext_bits, pps->num_extra_slice_header_bits);
	hantro_reg_write(vpu, &g2_slice_chqp_present,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT));
	hantro_reg_write(vpu, &g2_weight_bipr_idc,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED));
	hantro_reg_write(vpu, &g2_transq_bypass,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED));
	hantro_reg_write(vpu, &g2_list_mod_e,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT));
	hantro_reg_write(vpu, &g2_entropy_sync_e,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED));
	hantro_reg_write(vpu, &g2_cabac_init_present,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT));
	hantro_reg_write(vpu, &g2_idr_pic_e,
			 !!(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IRAP_PIC));
	hantro_reg_write(vpu, &hevc_parallel_merge,
			 pps->log2_parallel_merge_level_minus2 + 2);
	hantro_reg_write(vpu, &g2_pcm_filt_d,
			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED));
	hantro_reg_write(vpu, &g2_pcm_e,
			 !!(sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED));
	if (sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED) {
		hantro_reg_write(vpu, &g2_max_pcm_size,
				 sps->log2_diff_max_min_pcm_luma_coding_block_size +
				 sps->log2_min_pcm_luma_coding_block_size_minus3 + 3);
		hantro_reg_write(vpu, &g2_min_pcm_size,
				 sps->log2_min_pcm_luma_coding_block_size_minus3 + 3);
		hantro_reg_write(vpu, &g2_bit_depth_pcm_y,
				 sps->pcm_sample_bit_depth_luma_minus1 + 1);
		hantro_reg_write(vpu, &g2_bit_depth_pcm_c,
				 sps->pcm_sample_bit_depth_chroma_minus1 + 1);
	} else {
		hantro_reg_write(vpu, &g2_max_pcm_size, 0);
		hantro_reg_write(vpu, &g2_min_pcm_size, 0);
		hantro_reg_write(vpu, &g2_bit_depth_pcm_y, 0);
		hantro_reg_write(vpu, &g2_bit_depth_pcm_c, 0);
	}

	hantro_reg_write(vpu, &g2_start_code_e, 1);
	hantro_reg_write(vpu, &g2_init_qp, pps->init_qp_minus26 + 26);
	hantro_reg_write(vpu, &g2_weight_pred_e,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED));
	hantro_reg_write(vpu, &g2_cabac_init_present,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT));
	hantro_reg_write(vpu, &g2_const_intra_e,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED));
	hantro_reg_write(vpu, &g2_transform_skip,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED));
	hantro_reg_write(vpu, &g2_out_filtering_dis,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER));
	hantro_reg_write(vpu, &g2_filt_ctrl_pres,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT));
	hantro_reg_write(vpu, &g2_dependent_slice,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT_ENABLED));
	hantro_reg_write(vpu, &g2_filter_override,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED));
	hantro_reg_write(vpu, &g2_refidx0_active,
			 pps->num_ref_idx_l0_default_active_minus1 + 1);
	hantro_reg_write(vpu, &g2_refidx1_active,
			 pps->num_ref_idx_l1_default_active_minus1 + 1);
	hantro_reg_write(vpu, &g2_apf_threshold, 8);
}

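/*
 * Build the initial L0 and L1 reference picture lists from the decode
 * parameters and write them to the reference list registers.
 */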
static void set_ref_pic_list(struct hantro_ctx *ctx)
{
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	struct hantro_dev *vpu = ctx->dev;
	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
	u32 list0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX] = {};
	u32 list1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX] = {};
	static const struct hantro_reg ref_pic_regs0[] = {
		hevc_rlist_f0,
		hevc_rlist_f1,
		hevc_rlist_f2,
		hevc_rlist_f3,
		hevc_rlist_f4,
		hevc_rlist_f5,
		hevc_rlist_f6,
		hevc_rlist_f7,
		hevc_rlist_f8,
		hevc_rlist_f9,
		hevc_rlist_f10,
		hevc_rlist_f11,
		hevc_rlist_f12,
		hevc_rlist_f13,
		hevc_rlist_f14,
		hevc_rlist_f15,
	};
	static const struct hantro_reg ref_pic_regs1[] = {
		hevc_rlist_b0,
		hevc_rlist_b1,
		hevc_rlist_b2,
		hevc_rlist_b3,
		hevc_rlist_b4,
		hevc_rlist_b5,
		hevc_rlist_b6,
		hevc_rlist_b7,
		hevc_rlist_b8,
		hevc_rlist_b9,
		hevc_rlist_b10,
		hevc_rlist_b11,
		hevc_rlist_b12,
		hevc_rlist_b13,
		hevc_rlist_b14,
		hevc_rlist_b15,
	};
	unsigned int i, j;

	/* List 0 contains: short term before, short term after and long term */
	j = 0;
	for (i = 0; i < decode_params->num_poc_st_curr_before && j < ARRAY_SIZE(list0); i++)
		list0[j++] = decode_params->poc_st_curr_before[i];
	for (i = 0; i < decode_params->num_poc_st_curr_after && j < ARRAY_SIZE(list0); i++)
		list0[j++] = decode_params->poc_st_curr_after[i];
	for (i = 0; i < decode_params->num_poc_lt_curr && j < ARRAY_SIZE(list0); i++)
		list0[j++] = decode_params->poc_lt_curr[i];

	/* Fill the list, copying over and over */
	i = 0;
	while (j < ARRAY_SIZE(list0))
		list0[j++] = list0[i++];

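	/* List 1 contains: short term after, short term before and long term */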
	j = 0;
	for (i = 0; i < decode_params->num_poc_st_curr_after && j < ARRAY_SIZE(list1); i++)
		list1[j++] = decode_params->poc_st_curr_after[i];
	for (i = 0; i < decode_params->num_poc_st_curr_before && j < ARRAY_SIZE(list1); i++)
		list1[j++] = decode_params->poc_st_curr_before[i];
	for (i = 0; i < decode_params->num_poc_lt_curr && j < ARRAY_SIZE(list1); i++)
		list1[j++] = decode_params->poc_lt_curr[i];

	i = 0;
	while (j < ARRAY_SIZE(list1))
		list1[j++] = list1[i++];

	for (i = 0; i < V4L2_HEVC_DPB_ENTRIES_NUM_MAX; i++) {
		hantro_reg_write(vpu, &ref_pic_regs0[i], list0[i]);
		hantro_reg_write(vpu, &ref_pic_regs1[i], list1[i]);
	}
}

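/*
 * Program the reference picture state: POC differences, reference picture
 * lists and DPB buffer addresses, plus the output buffers for the current
 * picture. Returns 0 on success or a negative errno.
 */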
static int set_ref(struct hantro_ctx *ctx)
{
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
	const struct v4l2_hevc_dpb_entry *dpb = decode_params->dpb;
	dma_addr_t luma_addr, chroma_addr, mv_addr = 0;
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	struct hantro_decoded_buffer *dst;
	size_t cr_offset = hantro_hevc_chroma_offset(ctx);
	size_t mv_offset = hantro_hevc_motion_vectors_offset(ctx);
	u32 max_ref_frames;
	u16 dpb_longterm_e;
	static const struct hantro_reg cur_poc[] = {
		hevc_cur_poc_00,
		hevc_cur_poc_01,
		hevc_cur_poc_02,
		hevc_cur_poc_03,
		hevc_cur_poc_04,
		hevc_cur_poc_05,
		hevc_cur_poc_06,
		hevc_cur_poc_07,
		hevc_cur_poc_08,
		hevc_cur_poc_09,
		hevc_cur_poc_10,
		hevc_cur_poc_11,
		hevc_cur_poc_12,
		hevc_cur_poc_13,
		hevc_cur_poc_14,
		hevc_cur_poc_15,
	};
	unsigned int i;

	max_ref_frames = decode_params->num_poc_lt_curr +
		decode_params->num_poc_st_curr_before +
		decode_params->num_poc_st_curr_after;
	/*
	 * Set max_ref_frames to non-zero to avoid HW hang when decoding
	 * badly marked I-frames.
	 */
	max_ref_frames = max_ref_frames ? max_ref_frames : 1;
	hantro_reg_write(vpu, &g2_num_ref_frames, max_ref_frames);
	hantro_reg_write(vpu, &g2_filter_over_slices,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED));
	hantro_reg_write(vpu, &g2_filter_over_tiles,
			 !!(pps->flags & V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED));

	/*
	 * Write POC count diff from current pic.
	 */
	for (i = 0; i < decode_params->num_active_dpb_entries && i < ARRAY_SIZE(cur_poc); i++) {
		char poc_diff = decode_params->pic_order_cnt_val - dpb[i].pic_order_cnt_val;

		hantro_reg_write(vpu, &cur_poc[i], poc_diff);
	}

	if (i < ARRAY_SIZE(cur_poc)) {
		/*
		 * After the references, fill one entry pointing to itself,
		 * i.e. difference is zero.
		 */
		hantro_reg_write(vpu, &cur_poc[i], 0);
		i++;
	}

	/* Fill the rest with the current picture */
	for (; i < ARRAY_SIZE(cur_poc); i++)
		hantro_reg_write(vpu, &cur_poc[i], decode_params->pic_order_cnt_val);

	set_ref_pic_list(ctx);

	/* We will only keep the reference pictures that are still used */
	hantro_hevc_ref_init(ctx);

	/* Set up addresses of DPB buffers */
	dpb_longterm_e = 0;
	for (i = 0; i < decode_params->num_active_dpb_entries &&
	     i < (V4L2_HEVC_DPB_ENTRIES_NUM_MAX - 1); i++) {
		luma_addr = hantro_hevc_get_ref_buf(ctx, dpb[i].pic_order_cnt_val);
		if (!luma_addr)
			return -ENOMEM;

		chroma_addr = luma_addr + cr_offset;
		mv_addr = luma_addr + mv_offset;

		if (dpb[i].flags & V4L2_HEVC_DPB_ENTRY_LONG_TERM_REFERENCE)
			dpb_longterm_e |= BIT(V4L2_HEVC_DPB_ENTRIES_NUM_MAX - 1 - i);

		hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), luma_addr);
		hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), chroma_addr);
		hantro_write_addr(vpu, G2_REF_MV_ADDR(i), mv_addr);
	}

	vb2_dst = hantro_get_dst_buf(ctx);
	dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);
	luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
	if (!luma_addr)
		return -ENOMEM;

	if (hantro_hevc_add_ref_buf(ctx, decode_params->pic_order_cnt_val, luma_addr))
		return -EINVAL;

	chroma_addr = luma_addr + cr_offset;
	mv_addr = luma_addr + mv_offset;

	hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), luma_addr);
	hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), chroma_addr);
	hantro_write_addr(vpu, G2_REF_MV_ADDR(i++), mv_addr);

	hantro_write_addr(vpu, G2_OUT_LUMA_ADDR, luma_addr);
	hantro_write_addr(vpu, G2_OUT_CHROMA_ADDR, chroma_addr);
	hantro_write_addr(vpu, G2_OUT_MV_ADDR, mv_addr);

	for (; i < V4L2_HEVC_DPB_ENTRIES_NUM_MAX; i++) {
		hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), 0);
		hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), 0);
		hantro_write_addr(vpu, G2_REF_MV_ADDR(i), 0);
	}

	hantro_reg_write(vpu, &g2_refer_lterm_e, dpb_longterm_e);

	return 0;
}

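/* Program the bitstream source buffer and the auxiliary tile buffers. */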
static void set_buffers(struct hantro_ctx *ctx)
{
	struct vb2_v4l2_buffer *src_buf;
	struct hantro_dev *vpu = ctx->dev;
	dma_addr_t src_dma;
	u32 src_len, src_buf_len;

	src_buf = hantro_get_src_buf(ctx);

	/* Source (stream) buffer. */
	src_dma = vb2_dma_contig_plane_dma_addr(&src_buf->vb2_buf, 0);
	src_len = vb2_get_plane_payload(&src_buf->vb2_buf, 0);
	src_buf_len = vb2_plane_size(&src_buf->vb2_buf, 0);

	hantro_write_addr(vpu, G2_STREAM_ADDR, src_dma);
	hantro_reg_write(vpu, &g2_stream_len, src_len);
	hantro_reg_write(vpu, &g2_strm_buffer_len, src_buf_len);
	hantro_reg_write(vpu, &g2_strm_start_offset, 0);
	hantro_reg_write(vpu, &g2_write_mvs_e, 1);

	hantro_write_addr(vpu, G2_TILE_SIZES_ADDR, ctx->hevc_dec.tile_sizes.dma);
	hantro_write_addr(vpu, G2_TILE_FILTER_ADDR, ctx->hevc_dec.tile_filter.dma);
	hantro_write_addr(vpu, G2_TILE_SAO_ADDR, ctx->hevc_dec.tile_sao.dma);
	hantro_write_addr(vpu, G2_TILE_BSD_ADDR, ctx->hevc_dec.tile_bsd.dma);
}

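/*
 * Copy the scaling lists to the hardware buffer, DC coefficients first and
 * then each matrix column by column, and program the buffer address.
 */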
static void prepare_scaling_list_buffer(struct hantro_ctx *ctx)
{
	struct hantro_dev *vpu = ctx->dev;
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_scaling_matrix *sc = ctrls->scaling;
	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
	u8 *p = ((u8 *)ctx->hevc_dec.scaling_lists.cpu);
	unsigned int scaling_list_enabled;
	unsigned int i, j, k;

	scaling_list_enabled = !!(sps->flags & V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED);
	hantro_reg_write(vpu, &g2_scaling_list_e, scaling_list_enabled);

	if (!scaling_list_enabled)
		return;

	for (i = 0; i < ARRAY_SIZE(sc->scaling_list_dc_coef_16x16); i++)
		*p++ = sc->scaling_list_dc_coef_16x16[i];

	for (i = 0; i < ARRAY_SIZE(sc->scaling_list_dc_coef_32x32); i++)
		*p++ = sc->scaling_list_dc_coef_32x32[i];

	/* 128-bit boundary */
	p += 8;

	/* write scaling lists column by column */

	for (i = 0; i < 6; i++)
		for (j = 0; j < 4; j++)
			for (k = 0; k < 4; k++)
				*p++ = sc->scaling_list_4x4[i][4 * k + j];

	for (i = 0; i < 6; i++)
		for (j = 0; j < 8; j++)
			for (k = 0; k < 8; k++)
				*p++ = sc->scaling_list_8x8[i][8 * k + j];

	for (i = 0; i < 6; i++)
		for (j = 0; j < 8; j++)
			for (k = 0; k < 8; k++)
				*p++ = sc->scaling_list_16x16[i][8 * k + j];

	for (i = 0; i < 2; i++)
		for (j = 0; j < 8; j++)
			for (k = 0; k < 8; k++)
				*p++ = sc->scaling_list_32x32[i][8 * k + j];

	hantro_write_addr(vpu, G2_HEVC_SCALING_LIST_ADDR, ctx->hevc_dec.scaling_lists.dma);
}

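/*
 * Prepare and start one HEVC decode run: program the registers for the
 * current frame and start the hardware by writing the interrupt register.
 */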
int hantro_g2_hevc_dec_run(struct hantro_ctx *ctx)
{
	struct hantro_dev *vpu = ctx->dev;
	int ret;

	hantro_g2_check_idle(vpu);

	/* Prepare HEVC decoder context. */
	ret = hantro_hevc_dec_prepare_run(ctx);
	if (ret)
		return ret;

	/* Configure hardware registers. */
	set_params(ctx);

	/* Set reference pictures. */
	ret = set_ref(ctx);
	if (ret)
		return ret;

	set_buffers(ctx);
	prepare_tile_info_buffer(ctx);

	prepare_scaling_list_buffer(ctx);

	hantro_end_prepare_run(ctx);

	hantro_reg_write(vpu, &g2_mode, HEVC_DEC_MODE);
	hantro_reg_write(vpu, &g2_clk_gate_e, 1);

	/* Don't disable output */
	hantro_reg_write(vpu, &g2_out_dis, 0);

	/* Don't compress buffers */
	hantro_reg_write(vpu, &g2_ref_compress_bypass, 1);

	/* Bus width and max burst */
	hantro_reg_write(vpu, &g2_buswidth, BUS_WIDTH_128);
	hantro_reg_write(vpu, &g2_max_burst, 16);

	/* Swap */
	hantro_reg_write(vpu, &g2_strm_swap, 0xf);
	hantro_reg_write(vpu, &g2_dirmv_swap, 0xf);
	hantro_reg_write(vpu, &g2_compress_swap, 0xf);

	/* Start decoding! */
	vdpu_write(vpu, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);

	return 0;
}