// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VPU HEVC codec driver
 *
 * Copyright (C) 2020 Safran Passenger Innovations LLC
 */

#include <linux/types.h>
#include <media/v4l2-mem2mem.h>

#include "hantro.h"
#include "hantro_hw.h"

#define VERT_FILTER_RAM_SIZE 8 /* bytes per pixel row */
/*
 * BSD control data of current picture at tile border
 * 128 bits per 4x4 tile = 128/(8*4) bytes per row
 */
#define BSD_CTRL_RAM_SIZE 4 /* bytes per pixel row */
/* tile border coefficients of filter */
#define VERT_SAO_RAM_SIZE 48 /* bytes per pixel */

#define SCALING_LIST_SIZE (16 * 64)

#define MAX_TILE_COLS 20
#define MAX_TILE_ROWS 22
27
hantro_hevc_ref_init(struct hantro_ctx * ctx)28 void hantro_hevc_ref_init(struct hantro_ctx *ctx)
29 {
30 struct hantro_hevc_dec_hw_ctx *hevc_dec = &ctx->hevc_dec;
31
32 hevc_dec->ref_bufs_used = 0;
33 }
34
hantro_hevc_get_ref_buf(struct hantro_ctx * ctx,s32 poc)35 dma_addr_t hantro_hevc_get_ref_buf(struct hantro_ctx *ctx,
36 s32 poc)
37 {
38 struct hantro_hevc_dec_hw_ctx *hevc_dec = &ctx->hevc_dec;
39 int i;
40
41 /* Find the reference buffer in already known ones */
42 for (i = 0; i < NUM_REF_PICTURES; i++) {
43 if (hevc_dec->ref_bufs_poc[i] == poc) {
44 hevc_dec->ref_bufs_used |= 1 << i;
45 return hevc_dec->ref_bufs[i].dma;
46 }
47 }
48
49 return 0;
50 }
51
/*
 * Record a new reference picture (POC + DMA address) in the first free
 * slot of the tracking table.  Returns 0 on success, -EINVAL when all
 * NUM_REF_PICTURES slots are already in use.
 */
int hantro_hevc_add_ref_buf(struct hantro_ctx *ctx, int poc, dma_addr_t addr)
{
	struct hantro_hevc_dec_hw_ctx *hevc_dec = &ctx->hevc_dec;
	unsigned int slot;

	for (slot = 0; slot < NUM_REF_PICTURES; slot++) {
		u32 mask = 1 << slot;

		if (hevc_dec->ref_bufs_used & mask)
			continue;
		/* Claim the slot and remember where this picture lives. */
		hevc_dec->ref_bufs_used |= mask;
		hevc_dec->ref_bufs_poc[slot] = poc;
		hevc_dec->ref_bufs[slot].dma = addr;
		return 0;
	}

	/* Every slot busy: more references than the table can hold. */
	return -EINVAL;
}
69
/*
 * (Re)allocate the three per-tile-border helper buffers (vertical filter,
 * SAO coefficients, BSD control data) sized from the current SPS/PPS.
 * The allocation only ever grows: nothing is done when the stream has no
 * tiles or when enough columns are already allocated.  On failure all
 * three buffers are freed and -ENOMEM is returned.
 */
static int tile_buffer_reallocate(struct hantro_ctx *ctx)
{
	struct hantro_dev *vpu = ctx->dev;
	struct hantro_hevc_dec_hw_ctx *hevc_dec = &ctx->hevc_dec;
	const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
	const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
	const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
	unsigned int num_tile_cols = pps->num_tile_columns_minus1 + 1;
	/* Luma height rounded up to the next multiple of 64 */
	unsigned int height64 = (sps->pic_height_in_luma_samples + 63) & ~63;
	unsigned int size;

	/* No tiles, or the existing allocation already covers this many columns */
	if (num_tile_cols <= 1 ||
	    num_tile_cols <= hevc_dec->num_tile_cols_allocated)
		return 0;

	/* Need to reallocate due to tiles passed via PPS */
	if (hevc_dec->tile_filter.cpu) {
		dma_free_coherent(vpu->dev, hevc_dec->tile_filter.size,
				  hevc_dec->tile_filter.cpu,
				  hevc_dec->tile_filter.dma);
		hevc_dec->tile_filter.cpu = NULL;
	}

	if (hevc_dec->tile_sao.cpu) {
		dma_free_coherent(vpu->dev, hevc_dec->tile_sao.size,
				  hevc_dec->tile_sao.cpu,
				  hevc_dec->tile_sao.dma);
		hevc_dec->tile_sao.cpu = NULL;
	}

	if (hevc_dec->tile_bsd.cpu) {
		dma_free_coherent(vpu->dev, hevc_dec->tile_bsd.size,
				  hevc_dec->tile_bsd.cpu,
				  hevc_dec->tile_bsd.dma);
		hevc_dec->tile_bsd.cpu = NULL;
	}

	/*
	 * One border per tile-column boundary (num_tile_cols - 1); filter
	 * and SAO storage scale with the sample bit depth (in bits, /8 to
	 * get bytes).
	 */
	size = (VERT_FILTER_RAM_SIZE * height64 * (num_tile_cols - 1) * ctx->bit_depth) / 8;
	hevc_dec->tile_filter.cpu = dma_alloc_coherent(vpu->dev, size,
						       &hevc_dec->tile_filter.dma,
						       GFP_KERNEL);
	if (!hevc_dec->tile_filter.cpu)
		goto err_free_tile_buffers;
	hevc_dec->tile_filter.size = size;

	size = (VERT_SAO_RAM_SIZE * height64 * (num_tile_cols - 1) * ctx->bit_depth) / 8;
	hevc_dec->tile_sao.cpu = dma_alloc_coherent(vpu->dev, size,
						    &hevc_dec->tile_sao.dma,
						    GFP_KERNEL);
	if (!hevc_dec->tile_sao.cpu)
		goto err_free_tile_buffers;
	hevc_dec->tile_sao.size = size;

	/* BSD control data is bit-depth independent */
	size = BSD_CTRL_RAM_SIZE * height64 * (num_tile_cols - 1);
	hevc_dec->tile_bsd.cpu = dma_alloc_coherent(vpu->dev, size,
						    &hevc_dec->tile_bsd.dma,
						    GFP_KERNEL);
	if (!hevc_dec->tile_bsd.cpu)
		goto err_free_tile_buffers;
	hevc_dec->tile_bsd.size = size;

	hevc_dec->num_tile_cols_allocated = num_tile_cols;

	return 0;

err_free_tile_buffers:
	/*
	 * Unified error path: free whichever of the three buffers made it;
	 * .size is only read when .cpu is non-NULL, and in that case it was
	 * set right after the successful allocation above.
	 */
	if (hevc_dec->tile_filter.cpu)
		dma_free_coherent(vpu->dev, hevc_dec->tile_filter.size,
				  hevc_dec->tile_filter.cpu,
				  hevc_dec->tile_filter.dma);
	hevc_dec->tile_filter.cpu = NULL;

	if (hevc_dec->tile_sao.cpu)
		dma_free_coherent(vpu->dev, hevc_dec->tile_sao.size,
				  hevc_dec->tile_sao.cpu,
				  hevc_dec->tile_sao.dma);
	hevc_dec->tile_sao.cpu = NULL;

	if (hevc_dec->tile_bsd.cpu)
		dma_free_coherent(vpu->dev, hevc_dec->tile_bsd.size,
				  hevc_dec->tile_bsd.cpu,
				  hevc_dec->tile_bsd.dma);
	hevc_dec->tile_bsd.cpu = NULL;

	return -ENOMEM;
}
156
hantro_hevc_validate_sps(struct hantro_ctx * ctx,const struct v4l2_ctrl_hevc_sps * sps)157 static int hantro_hevc_validate_sps(struct hantro_ctx *ctx, const struct v4l2_ctrl_hevc_sps *sps)
158 {
159 /*
160 * for tile pixel format check if the width and height match
161 * hardware constraints
162 */
163 if (ctx->vpu_dst_fmt->fourcc == V4L2_PIX_FMT_NV12_4L4) {
164 if (ctx->dst_fmt.width !=
165 ALIGN(sps->pic_width_in_luma_samples, ctx->vpu_dst_fmt->frmsize.step_width))
166 return -EINVAL;
167
168 if (ctx->dst_fmt.height !=
169 ALIGN(sps->pic_height_in_luma_samples, ctx->vpu_dst_fmt->frmsize.step_height))
170 return -EINVAL;
171 }
172
173 return 0;
174 }
175
hantro_hevc_dec_prepare_run(struct hantro_ctx * ctx)176 int hantro_hevc_dec_prepare_run(struct hantro_ctx *ctx)
177 {
178 struct hantro_hevc_dec_hw_ctx *hevc_ctx = &ctx->hevc_dec;
179 struct hantro_hevc_dec_ctrls *ctrls = &hevc_ctx->ctrls;
180 int ret;
181
182 hantro_start_prepare_run(ctx);
183
184 ctrls->decode_params =
185 hantro_get_ctrl(ctx, V4L2_CID_STATELESS_HEVC_DECODE_PARAMS);
186 if (WARN_ON(!ctrls->decode_params))
187 return -EINVAL;
188
189 ctrls->scaling =
190 hantro_get_ctrl(ctx, V4L2_CID_STATELESS_HEVC_SCALING_MATRIX);
191 if (WARN_ON(!ctrls->scaling))
192 return -EINVAL;
193
194 ctrls->sps =
195 hantro_get_ctrl(ctx, V4L2_CID_STATELESS_HEVC_SPS);
196 if (WARN_ON(!ctrls->sps))
197 return -EINVAL;
198
199 ret = hantro_hevc_validate_sps(ctx, ctrls->sps);
200 if (ret)
201 return ret;
202
203 ctrls->pps =
204 hantro_get_ctrl(ctx, V4L2_CID_STATELESS_HEVC_PPS);
205 if (WARN_ON(!ctrls->pps))
206 return -EINVAL;
207
208 ret = tile_buffer_reallocate(ctx);
209 if (ret)
210 return ret;
211
212 return 0;
213 }
214
hantro_hevc_dec_exit(struct hantro_ctx * ctx)215 void hantro_hevc_dec_exit(struct hantro_ctx *ctx)
216 {
217 struct hantro_dev *vpu = ctx->dev;
218 struct hantro_hevc_dec_hw_ctx *hevc_dec = &ctx->hevc_dec;
219
220 if (hevc_dec->tile_sizes.cpu)
221 dma_free_coherent(vpu->dev, hevc_dec->tile_sizes.size,
222 hevc_dec->tile_sizes.cpu,
223 hevc_dec->tile_sizes.dma);
224 hevc_dec->tile_sizes.cpu = NULL;
225
226 if (hevc_dec->scaling_lists.cpu)
227 dma_free_coherent(vpu->dev, hevc_dec->scaling_lists.size,
228 hevc_dec->scaling_lists.cpu,
229 hevc_dec->scaling_lists.dma);
230 hevc_dec->scaling_lists.cpu = NULL;
231
232 if (hevc_dec->tile_filter.cpu)
233 dma_free_coherent(vpu->dev, hevc_dec->tile_filter.size,
234 hevc_dec->tile_filter.cpu,
235 hevc_dec->tile_filter.dma);
236 hevc_dec->tile_filter.cpu = NULL;
237
238 if (hevc_dec->tile_sao.cpu)
239 dma_free_coherent(vpu->dev, hevc_dec->tile_sao.size,
240 hevc_dec->tile_sao.cpu,
241 hevc_dec->tile_sao.dma);
242 hevc_dec->tile_sao.cpu = NULL;
243
244 if (hevc_dec->tile_bsd.cpu)
245 dma_free_coherent(vpu->dev, hevc_dec->tile_bsd.size,
246 hevc_dec->tile_bsd.cpu,
247 hevc_dec->tile_bsd.dma);
248 hevc_dec->tile_bsd.cpu = NULL;
249 }
250
hantro_hevc_dec_init(struct hantro_ctx * ctx)251 int hantro_hevc_dec_init(struct hantro_ctx *ctx)
252 {
253 struct hantro_dev *vpu = ctx->dev;
254 struct hantro_hevc_dec_hw_ctx *hevc_dec = &ctx->hevc_dec;
255 unsigned int size;
256
257 memset(hevc_dec, 0, sizeof(*hevc_dec));
258
259 /*
260 * Maximum number of tiles times width and height (2 bytes each),
261 * rounding up to next 16 bytes boundary + one extra 16 byte
262 * chunk (HW guys wanted to have this).
263 */
264 size = round_up(MAX_TILE_COLS * MAX_TILE_ROWS * 4 * sizeof(u16) + 16, 16);
265 hevc_dec->tile_sizes.cpu = dma_alloc_coherent(vpu->dev, size,
266 &hevc_dec->tile_sizes.dma,
267 GFP_KERNEL);
268 if (!hevc_dec->tile_sizes.cpu)
269 return -ENOMEM;
270
271 hevc_dec->tile_sizes.size = size;
272
273 hevc_dec->scaling_lists.cpu = dma_alloc_coherent(vpu->dev, SCALING_LIST_SIZE,
274 &hevc_dec->scaling_lists.dma,
275 GFP_KERNEL);
276 if (!hevc_dec->scaling_lists.cpu)
277 return -ENOMEM;
278
279 hevc_dec->scaling_lists.size = SCALING_LIST_SIZE;
280
281 hantro_hevc_ref_init(ctx);
282
283 return 0;
284 }
285