drivers/media/platform/verisilicon/hantro_g2_hevc_dec.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Hantro VPU HEVC codec driver
 *
 * Copyright (C) 2020 Safran Passenger Innovations LLC
 */

#include "hantro_hw.h"
#include "hantro_g2_regs.h"

#define G2_ALIGN        16

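/*
 * Decoded picture buffers hold the luma plane first, the chroma plane
 * at an offset of width * height, and finally the per-picture motion
 * vector storage, aligned to G2_ALIGN.
 */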
static size_t hantro_hevc_chroma_offset(struct hantro_ctx *ctx)
{
        return ctx->dst_fmt.width * ctx->dst_fmt.height;
}

static size_t hantro_hevc_motion_vectors_offset(struct hantro_ctx *ctx)
{
        size_t cr_offset = hantro_hevc_chroma_offset(ctx);

        return ALIGN((cr_offset * 3) / 2, G2_ALIGN);
}

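/*
 * Fill the tile_sizes auxiliary buffer consumed by the hardware: one
 * 16-bit width and one 16-bit height (both in CTB units) per tile,
 * written row by row. When tiles are disabled a single entry covering
 * the whole picture is written instead.
 */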
static void prepare_tile_info_buffer(struct hantro_ctx *ctx)
{
        struct hantro_dev *vpu = ctx->dev;
        const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
        const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
        const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
        u16 *p = (u16 *)((u8 *)ctx->hevc_dec.tile_sizes.cpu);
        unsigned int num_tile_rows = pps->num_tile_rows_minus1 + 1;
        unsigned int num_tile_cols = pps->num_tile_columns_minus1 + 1;
        unsigned int pic_width_in_ctbs, pic_height_in_ctbs;
        unsigned int max_log2_ctb_size, ctb_size;
        bool tiles_enabled, uniform_spacing;
        u32 no_chroma = 0;

        tiles_enabled = !!(pps->flags & V4L2_HEVC_PPS_FLAG_TILES_ENABLED);
        uniform_spacing = !!(pps->flags & V4L2_HEVC_PPS_FLAG_UNIFORM_SPACING);

        hantro_reg_write(vpu, &g2_tile_e, tiles_enabled);

        max_log2_ctb_size = sps->log2_min_luma_coding_block_size_minus3 + 3 +
                            sps->log2_diff_max_min_luma_coding_block_size;
        pic_width_in_ctbs = (sps->pic_width_in_luma_samples +
                            (1 << max_log2_ctb_size) - 1) >> max_log2_ctb_size;
        pic_height_in_ctbs = (sps->pic_height_in_luma_samples + (1 << max_log2_ctb_size) - 1)
                             >> max_log2_ctb_size;
        ctb_size = 1 << max_log2_ctb_size;

        vpu_debug(1, "Preparing tile sizes buffer for %dx%d CTBs (CTB size %d)\n",
                  pic_width_in_ctbs, pic_height_in_ctbs, ctb_size);

        if (tiles_enabled) {
                unsigned int i, j, h;

                vpu_debug(1, "Tiles enabled! %dx%d\n", num_tile_cols, num_tile_rows);

                hantro_reg_write(vpu, &g2_num_tile_rows, num_tile_rows);
                hantro_reg_write(vpu, &g2_num_tile_cols, num_tile_cols);

                /* write width + height for each tile in pic */
                if (!uniform_spacing) {
                        u32 tmp_w = 0, tmp_h = 0;

                        for (i = 0; i < num_tile_rows; i++) {
                                if (i == num_tile_rows - 1)
                                        h = pic_height_in_ctbs - tmp_h;
                                else
                                        h = pps->row_height_minus1[i] + 1;
                                tmp_h += h;
                                if (i == 0 && h == 1 && ctb_size == 16)
                                        no_chroma = 1;
                                for (j = 0, tmp_w = 0; j < num_tile_cols - 1; j++) {
                                        tmp_w += pps->column_width_minus1[j] + 1;
                                        *p++ = pps->column_width_minus1[j] + 1;
                                        *p++ = h;
                                        if (i == 0 && h == 1 && ctb_size == 16)
                                                no_chroma = 1;
                                }
                                /* last column */
                                *p++ = pic_width_in_ctbs - tmp_w;
                                *p++ = h;
                        }
                } else { /* uniform spacing */
                        u32 tmp, prev_h, prev_w;

                        for (i = 0, prev_h = 0; i < num_tile_rows; i++) {
                                tmp = (i + 1) * pic_height_in_ctbs / num_tile_rows;
                                h = tmp - prev_h;
                                prev_h = tmp;
                                if (i == 0 && h == 1 && ctb_size == 16)
                                        no_chroma = 1;
                                for (j = 0, prev_w = 0; j < num_tile_cols; j++) {
                                        tmp = (j + 1) * pic_width_in_ctbs / num_tile_cols;
                                        *p++ = tmp - prev_w;
                                        *p++ = h;
                                        if (j == 0 &&
                                            (pps->column_width_minus1[0] + 1) == 1 &&
                                            ctb_size == 16)
                                                no_chroma = 1;
                                        prev_w = tmp;
                                }
                        }
                }
        } else {
                hantro_reg_write(vpu, &g2_num_tile_rows, 1);
                hantro_reg_write(vpu, &g2_num_tile_cols, 1);

                /* There's one tile, with dimensions equal to pic size. */
                p[0] = pic_width_in_ctbs;
                p[1] = pic_height_in_ctbs;
        }

        if (no_chroma)
                vpu_debug(1, "%s: no chroma!\n", __func__);
}

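/*
 * Number of slice header bits the hardware has to skip before the
 * syntax elements it parses itself: pic_output_flag, colour_plane_id,
 * pic_order_cnt_lsb and the short/long term reference picture set
 * data, depending on the SPS/PPS flags and the picture type.
 */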
static int compute_header_skip_length(struct hantro_ctx *ctx)
{
        const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
        const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
        const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
        const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
        int skip = 0;

        if (pps->flags & V4L2_HEVC_PPS_FLAG_OUTPUT_FLAG_PRESENT)
                /* size of pic_output_flag */
                skip++;

        if (sps->flags & V4L2_HEVC_SPS_FLAG_SEPARATE_COLOUR_PLANE)
                /* size of colour_plane_id */
                skip += 2;

        if (!(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IDR_PIC)) {
                /* size of pic_order_cnt_lsb */
                skip += sps->log2_max_pic_order_cnt_lsb_minus4 + 4;

                /* size of short_term_ref_pic_set_sps_flag */
                skip++;

                if (decode_params->short_term_ref_pic_set_size)
                        /* size of st_ref_pic_set( num_short_term_ref_pic_sets ) */
                        skip += decode_params->short_term_ref_pic_set_size;
                else if (sps->num_short_term_ref_pic_sets > 1)
                        skip += fls(sps->num_short_term_ref_pic_sets - 1);

                skip += decode_params->long_term_ref_pic_set_size;
        }

        return skip;
}

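/*
 * Program the per-picture parameters derived from the SPS, PPS and
 * decode parameters controls: bit depths, block sizes, coding tool
 * enables, QP offsets, deblocking and PCM settings.
 */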
static void set_params(struct hantro_ctx *ctx)
{
        const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
        const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
        const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
        const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
        struct hantro_dev *vpu = ctx->dev;
        u32 min_log2_cb_size, max_log2_ctb_size, min_cb_size, max_ctb_size;
        u32 pic_width_in_min_cbs, pic_height_in_min_cbs;
        u32 pic_width_aligned, pic_height_aligned;
        u32 partial_ctb_x, partial_ctb_y;

        hantro_reg_write(vpu, &g2_bit_depth_y_minus8, sps->bit_depth_luma_minus8);
        hantro_reg_write(vpu, &g2_bit_depth_c_minus8, sps->bit_depth_chroma_minus8);

        hantro_reg_write(vpu, &g2_output_8_bits, 0);

        hantro_reg_write(vpu, &g2_hdr_skip_length, compute_header_skip_length(ctx));

        min_log2_cb_size = sps->log2_min_luma_coding_block_size_minus3 + 3;
        max_log2_ctb_size = min_log2_cb_size + sps->log2_diff_max_min_luma_coding_block_size;

        hantro_reg_write(vpu, &g2_min_cb_size, min_log2_cb_size);
        hantro_reg_write(vpu, &g2_max_cb_size, max_log2_ctb_size);

        min_cb_size = 1 << min_log2_cb_size;
        max_ctb_size = 1 << max_log2_ctb_size;

        pic_width_in_min_cbs = sps->pic_width_in_luma_samples / min_cb_size;
        pic_height_in_min_cbs = sps->pic_height_in_luma_samples / min_cb_size;
        pic_width_aligned = ALIGN(sps->pic_width_in_luma_samples, max_ctb_size);
        pic_height_aligned = ALIGN(sps->pic_height_in_luma_samples, max_ctb_size);

        partial_ctb_x = !!(sps->pic_width_in_luma_samples != pic_width_aligned);
        partial_ctb_y = !!(sps->pic_height_in_luma_samples != pic_height_aligned);

        hantro_reg_write(vpu, &g2_partial_ctb_x, partial_ctb_x);
        hantro_reg_write(vpu, &g2_partial_ctb_y, partial_ctb_y);

        hantro_reg_write(vpu, &g2_pic_width_in_cbs, pic_width_in_min_cbs);
        hantro_reg_write(vpu, &g2_pic_height_in_cbs, pic_height_in_min_cbs);

        hantro_reg_write(vpu, &g2_pic_width_4x4,
                         (pic_width_in_min_cbs * min_cb_size) / 4);
        hantro_reg_write(vpu, &g2_pic_height_4x4,
                         (pic_height_in_min_cbs * min_cb_size) / 4);

        hantro_reg_write(vpu, &hevc_max_inter_hierdepth,
                         sps->max_transform_hierarchy_depth_inter);
        hantro_reg_write(vpu, &hevc_max_intra_hierdepth,
                         sps->max_transform_hierarchy_depth_intra);
        hantro_reg_write(vpu, &hevc_min_trb_size,
                         sps->log2_min_luma_transform_block_size_minus2 + 2);
        hantro_reg_write(vpu, &hevc_max_trb_size,
                         sps->log2_min_luma_transform_block_size_minus2 + 2 +
                         sps->log2_diff_max_min_luma_transform_block_size);

        hantro_reg_write(vpu, &g2_tempor_mvp_e,
                         !!(sps->flags & V4L2_HEVC_SPS_FLAG_SPS_TEMPORAL_MVP_ENABLED) &&
                         !(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IDR_PIC));
        hantro_reg_write(vpu, &g2_strong_smooth_e,
                         !!(sps->flags & V4L2_HEVC_SPS_FLAG_STRONG_INTRA_SMOOTHING_ENABLED));
        hantro_reg_write(vpu, &g2_asym_pred_e,
                         !!(sps->flags & V4L2_HEVC_SPS_FLAG_AMP_ENABLED));
        hantro_reg_write(vpu, &g2_sao_e,
                         !!(sps->flags & V4L2_HEVC_SPS_FLAG_SAMPLE_ADAPTIVE_OFFSET));
        hantro_reg_write(vpu, &g2_sign_data_hide,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_SIGN_DATA_HIDING_ENABLED));

        if (pps->flags & V4L2_HEVC_PPS_FLAG_CU_QP_DELTA_ENABLED) {
                hantro_reg_write(vpu, &g2_cu_qpd_e, 1);
                hantro_reg_write(vpu, &g2_max_cu_qpd_depth, pps->diff_cu_qp_delta_depth);
        } else {
                hantro_reg_write(vpu, &g2_cu_qpd_e, 0);
                hantro_reg_write(vpu, &g2_max_cu_qpd_depth, 0);
        }

        hantro_reg_write(vpu, &g2_cb_qp_offset, pps->pps_cb_qp_offset);
        hantro_reg_write(vpu, &g2_cr_qp_offset, pps->pps_cr_qp_offset);

        hantro_reg_write(vpu, &g2_filt_offset_beta, pps->pps_beta_offset_div2);
        hantro_reg_write(vpu, &g2_filt_offset_tc, pps->pps_tc_offset_div2);
        hantro_reg_write(vpu, &g2_slice_hdr_ext_e,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_SLICE_SEGMENT_HEADER_EXTENSION_PRESENT));
        hantro_reg_write(vpu, &g2_slice_hdr_ext_bits, pps->num_extra_slice_header_bits);
        hantro_reg_write(vpu, &g2_slice_chqp_present,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT));
        hantro_reg_write(vpu, &g2_weight_bipr_idc,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_WEIGHTED_BIPRED));
        hantro_reg_write(vpu, &g2_transq_bypass,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_TRANSQUANT_BYPASS_ENABLED));
        hantro_reg_write(vpu, &g2_list_mod_e,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_LISTS_MODIFICATION_PRESENT));
        hantro_reg_write(vpu, &g2_entropy_sync_e,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_ENTROPY_CODING_SYNC_ENABLED));
        hantro_reg_write(vpu, &g2_cabac_init_present,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT));
        hantro_reg_write(vpu, &g2_idr_pic_e,
                         !!(decode_params->flags & V4L2_HEVC_DECODE_PARAM_FLAG_IRAP_PIC));
        hantro_reg_write(vpu, &hevc_parallel_merge,
                         pps->log2_parallel_merge_level_minus2 + 2);
        hantro_reg_write(vpu, &g2_pcm_filt_d,
                         !!(sps->flags & V4L2_HEVC_SPS_FLAG_PCM_LOOP_FILTER_DISABLED));
        hantro_reg_write(vpu, &g2_pcm_e,
                         !!(sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED));
        if (sps->flags & V4L2_HEVC_SPS_FLAG_PCM_ENABLED) {
                hantro_reg_write(vpu, &g2_max_pcm_size,
                                 sps->log2_diff_max_min_pcm_luma_coding_block_size +
                                 sps->log2_min_pcm_luma_coding_block_size_minus3 + 3);
                hantro_reg_write(vpu, &g2_min_pcm_size,
                                 sps->log2_min_pcm_luma_coding_block_size_minus3 + 3);
                hantro_reg_write(vpu, &g2_bit_depth_pcm_y,
                                 sps->pcm_sample_bit_depth_luma_minus1 + 1);
                hantro_reg_write(vpu, &g2_bit_depth_pcm_c,
                                 sps->pcm_sample_bit_depth_chroma_minus1 + 1);
        } else {
                hantro_reg_write(vpu, &g2_max_pcm_size, 0);
                hantro_reg_write(vpu, &g2_min_pcm_size, 0);
                hantro_reg_write(vpu, &g2_bit_depth_pcm_y, 0);
                hantro_reg_write(vpu, &g2_bit_depth_pcm_c, 0);
        }

        hantro_reg_write(vpu, &g2_start_code_e, 1);
        hantro_reg_write(vpu, &g2_init_qp, pps->init_qp_minus26 + 26);
        hantro_reg_write(vpu, &g2_weight_pred_e,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_WEIGHTED_PRED));
        hantro_reg_write(vpu, &g2_cabac_init_present,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_CABAC_INIT_PRESENT));
        hantro_reg_write(vpu, &g2_const_intra_e,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_CONSTRAINED_INTRA_PRED));
        hantro_reg_write(vpu, &g2_transform_skip,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_TRANSFORM_SKIP_ENABLED));
        hantro_reg_write(vpu, &g2_out_filtering_dis,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_DISABLE_DEBLOCKING_FILTER));
        hantro_reg_write(vpu, &g2_filt_ctrl_pres,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT));
        hantro_reg_write(vpu, &g2_dependent_slice,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEPENDENT_SLICE_SEGMENT_ENABLED));
        hantro_reg_write(vpu, &g2_filter_override,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_DEBLOCKING_FILTER_OVERRIDE_ENABLED));
        hantro_reg_write(vpu, &g2_refidx0_active,
                         pps->num_ref_idx_l0_default_active_minus1 + 1);
        hantro_reg_write(vpu, &g2_refidx1_active,
                         pps->num_ref_idx_l1_default_active_minus1 + 1);
        hantro_reg_write(vpu, &g2_apf_threshold, 8);
}

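/*
 * Build the initial reference picture lists: list 0 starts with the
 * short term references before the current picture, list 1 with the
 * ones after it, both followed by the long term references and padded
 * by repeating valid entries until all 16 slots are filled.
 */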
static void set_ref_pic_list(struct hantro_ctx *ctx)
{
        const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
        struct hantro_dev *vpu = ctx->dev;
        const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
        u32 list0[V4L2_HEVC_DPB_ENTRIES_NUM_MAX] = {};
        u32 list1[V4L2_HEVC_DPB_ENTRIES_NUM_MAX] = {};
        static const struct hantro_reg ref_pic_regs0[] = {
                hevc_rlist_f0,
                hevc_rlist_f1,
                hevc_rlist_f2,
                hevc_rlist_f3,
                hevc_rlist_f4,
                hevc_rlist_f5,
                hevc_rlist_f6,
                hevc_rlist_f7,
                hevc_rlist_f8,
                hevc_rlist_f9,
                hevc_rlist_f10,
                hevc_rlist_f11,
                hevc_rlist_f12,
                hevc_rlist_f13,
                hevc_rlist_f14,
                hevc_rlist_f15,
        };
        static const struct hantro_reg ref_pic_regs1[] = {
                hevc_rlist_b0,
                hevc_rlist_b1,
                hevc_rlist_b2,
                hevc_rlist_b3,
                hevc_rlist_b4,
                hevc_rlist_b5,
                hevc_rlist_b6,
                hevc_rlist_b7,
                hevc_rlist_b8,
                hevc_rlist_b9,
                hevc_rlist_b10,
                hevc_rlist_b11,
                hevc_rlist_b12,
                hevc_rlist_b13,
                hevc_rlist_b14,
                hevc_rlist_b15,
        };
        unsigned int i, j;

        /* List 0 contains: short term before, short term after and long term */
        j = 0;
        for (i = 0; i < decode_params->num_poc_st_curr_before && j < ARRAY_SIZE(list0); i++)
                list0[j++] = decode_params->poc_st_curr_before[i];
        for (i = 0; i < decode_params->num_poc_st_curr_after && j < ARRAY_SIZE(list0); i++)
                list0[j++] = decode_params->poc_st_curr_after[i];
        for (i = 0; i < decode_params->num_poc_lt_curr && j < ARRAY_SIZE(list0); i++)
                list0[j++] = decode_params->poc_lt_curr[i];

        /* Fill the list, copying over and over */
        i = 0;
        while (j < ARRAY_SIZE(list0))
                list0[j++] = list0[i++];

        j = 0;
        for (i = 0; i < decode_params->num_poc_st_curr_after && j < ARRAY_SIZE(list1); i++)
                list1[j++] = decode_params->poc_st_curr_after[i];
        for (i = 0; i < decode_params->num_poc_st_curr_before && j < ARRAY_SIZE(list1); i++)
                list1[j++] = decode_params->poc_st_curr_before[i];
        for (i = 0; i < decode_params->num_poc_lt_curr && j < ARRAY_SIZE(list1); i++)
                list1[j++] = decode_params->poc_lt_curr[i];

        i = 0;
        while (j < ARRAY_SIZE(list1))
                list1[j++] = list1[i++];

        for (i = 0; i < V4L2_HEVC_DPB_ENTRIES_NUM_MAX; i++) {
                hantro_reg_write(vpu, &ref_pic_regs0[i], list0[i]);
                hantro_reg_write(vpu, &ref_pic_regs1[i], list1[i]);
        }
}

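/*
 * Program the reference picture state: POC differences relative to the
 * current picture, luma/chroma/motion-vector addresses for every active
 * DPB entry plus the current (output) picture, and the long term flags
 * collected in dpb_longterm_e, entry 0 in the most significant bit.
 */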
static int set_ref(struct hantro_ctx *ctx)
{
        const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
        const struct v4l2_ctrl_hevc_pps *pps = ctrls->pps;
        const struct v4l2_ctrl_hevc_decode_params *decode_params = ctrls->decode_params;
        const struct v4l2_hevc_dpb_entry *dpb = decode_params->dpb;
        dma_addr_t luma_addr, chroma_addr, mv_addr = 0;
        struct hantro_dev *vpu = ctx->dev;
        struct vb2_v4l2_buffer *vb2_dst;
        struct hantro_decoded_buffer *dst;
        size_t cr_offset = hantro_hevc_chroma_offset(ctx);
        size_t mv_offset = hantro_hevc_motion_vectors_offset(ctx);
        u32 max_ref_frames;
        u16 dpb_longterm_e;
        static const struct hantro_reg cur_poc[] = {
                hevc_cur_poc_00,
                hevc_cur_poc_01,
                hevc_cur_poc_02,
                hevc_cur_poc_03,
                hevc_cur_poc_04,
                hevc_cur_poc_05,
                hevc_cur_poc_06,
                hevc_cur_poc_07,
                hevc_cur_poc_08,
                hevc_cur_poc_09,
                hevc_cur_poc_10,
                hevc_cur_poc_11,
                hevc_cur_poc_12,
                hevc_cur_poc_13,
                hevc_cur_poc_14,
                hevc_cur_poc_15,
        };
        unsigned int i;

        max_ref_frames = decode_params->num_poc_lt_curr +
                decode_params->num_poc_st_curr_before +
                decode_params->num_poc_st_curr_after;
        /*
         * Set max_ref_frames to non-zero to avoid HW hang when decoding
         * badly marked I-frames.
         */
        max_ref_frames = max_ref_frames ? max_ref_frames : 1;
        hantro_reg_write(vpu, &g2_num_ref_frames, max_ref_frames);
        hantro_reg_write(vpu, &g2_filter_over_slices,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED));
        hantro_reg_write(vpu, &g2_filter_over_tiles,
                         !!(pps->flags & V4L2_HEVC_PPS_FLAG_LOOP_FILTER_ACROSS_TILES_ENABLED));

        /*
         * Write POC count diff from current pic.
         */
        for (i = 0; i < decode_params->num_active_dpb_entries && i < ARRAY_SIZE(cur_poc); i++) {
                char poc_diff = decode_params->pic_order_cnt_val - dpb[i].pic_order_cnt_val;

                hantro_reg_write(vpu, &cur_poc[i], poc_diff);
        }

        if (i < ARRAY_SIZE(cur_poc)) {
                /*
                 * After the references, fill one entry pointing to itself,
                 * i.e. difference is zero.
                 */
                hantro_reg_write(vpu, &cur_poc[i], 0);
                i++;
        }

        /* Fill the rest with the current picture */
        for (; i < ARRAY_SIZE(cur_poc); i++)
                hantro_reg_write(vpu, &cur_poc[i], decode_params->pic_order_cnt_val);

        set_ref_pic_list(ctx);

        /* We will only keep the reference pictures that are still used */
        hantro_hevc_ref_init(ctx);

        /* Set up addresses of DPB buffers */
        dpb_longterm_e = 0;
        for (i = 0; i < decode_params->num_active_dpb_entries &&
             i < (V4L2_HEVC_DPB_ENTRIES_NUM_MAX - 1); i++) {
                luma_addr = hantro_hevc_get_ref_buf(ctx, dpb[i].pic_order_cnt_val);
                if (!luma_addr)
                        return -ENOMEM;

                chroma_addr = luma_addr + cr_offset;
                mv_addr = luma_addr + mv_offset;

                if (dpb[i].flags & V4L2_HEVC_DPB_ENTRY_LONG_TERM_REFERENCE)
                        dpb_longterm_e |= BIT(V4L2_HEVC_DPB_ENTRIES_NUM_MAX - 1 - i);

                hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), luma_addr);
                hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), chroma_addr);
                hantro_write_addr(vpu, G2_REF_MV_ADDR(i), mv_addr);
        }

        vb2_dst = hantro_get_dst_buf(ctx);
        dst = vb2_to_hantro_decoded_buf(&vb2_dst->vb2_buf);
        luma_addr = hantro_get_dec_buf_addr(ctx, &dst->base.vb.vb2_buf);
        if (!luma_addr)
                return -ENOMEM;

        if (hantro_hevc_add_ref_buf(ctx, decode_params->pic_order_cnt_val, luma_addr))
                return -EINVAL;

        chroma_addr = luma_addr + cr_offset;
        mv_addr = luma_addr + mv_offset;

        hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), luma_addr);
        hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), chroma_addr);
        hantro_write_addr(vpu, G2_REF_MV_ADDR(i++), mv_addr);

        hantro_write_addr(vpu, G2_OUT_LUMA_ADDR, luma_addr);
        hantro_write_addr(vpu, G2_OUT_CHROMA_ADDR, chroma_addr);
        hantro_write_addr(vpu, G2_OUT_MV_ADDR, mv_addr);

        for (; i < V4L2_HEVC_DPB_ENTRIES_NUM_MAX; i++) {
                hantro_write_addr(vpu, G2_REF_LUMA_ADDR(i), 0);
                hantro_write_addr(vpu, G2_REF_CHROMA_ADDR(i), 0);
                hantro_write_addr(vpu, G2_REF_MV_ADDR(i), 0);
        }

        hantro_reg_write(vpu, &g2_refer_lterm_e, dpb_longterm_e);

        return 0;
}

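/*
 * Program the bitstream (source) buffer address and lengths together
 * with the per-tile auxiliary buffers (filter, SAO and BSD data).
 */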
static void set_buffers(struct hantro_ctx *ctx)
{
        struct vb2_v4l2_buffer *src_buf;
        struct hantro_dev *vpu = ctx->dev;
        dma_addr_t src_dma;
        u32 src_len, src_buf_len;

        src_buf = hantro_get_src_buf(ctx);

        /* Source (stream) buffer. */
        src_dma = vb2_dma_contig_plane_dma_addr(&src_buf->vb2_buf, 0);
        src_len = vb2_get_plane_payload(&src_buf->vb2_buf, 0);
        src_buf_len = vb2_plane_size(&src_buf->vb2_buf, 0);

        hantro_write_addr(vpu, G2_STREAM_ADDR, src_dma);
        hantro_reg_write(vpu, &g2_stream_len, src_len);
        hantro_reg_write(vpu, &g2_strm_buffer_len, src_buf_len);
        hantro_reg_write(vpu, &g2_strm_start_offset, 0);
        hantro_reg_write(vpu, &g2_write_mvs_e, 1);

        hantro_write_addr(vpu, G2_TILE_SIZES_ADDR, ctx->hevc_dec.tile_sizes.dma);
        hantro_write_addr(vpu, G2_TILE_FILTER_ADDR, ctx->hevc_dec.tile_filter.dma);
        hantro_write_addr(vpu, G2_TILE_SAO_ADDR, ctx->hevc_dec.tile_sao.dma);
        hantro_write_addr(vpu, G2_TILE_BSD_ADDR, ctx->hevc_dec.tile_bsd.dma);
}

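/*
 * Copy the scaling matrices into the layout consumed by the hardware:
 * the 16x16 and 32x32 DC coefficients first, padded up to a 128-bit
 * boundary, then each matrix written column by column.
 */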
static void prepare_scaling_list_buffer(struct hantro_ctx *ctx)
{
        struct hantro_dev *vpu = ctx->dev;
        const struct hantro_hevc_dec_ctrls *ctrls = &ctx->hevc_dec.ctrls;
        const struct v4l2_ctrl_hevc_scaling_matrix *sc = ctrls->scaling;
        const struct v4l2_ctrl_hevc_sps *sps = ctrls->sps;
        u8 *p = ((u8 *)ctx->hevc_dec.scaling_lists.cpu);
        unsigned int scaling_list_enabled;
        unsigned int i, j, k;

        scaling_list_enabled = !!(sps->flags & V4L2_HEVC_SPS_FLAG_SCALING_LIST_ENABLED);
        hantro_reg_write(vpu, &g2_scaling_list_e, scaling_list_enabled);

        if (!scaling_list_enabled)
                return;

        for (i = 0; i < ARRAY_SIZE(sc->scaling_list_dc_coef_16x16); i++)
                *p++ = sc->scaling_list_dc_coef_16x16[i];

        for (i = 0; i < ARRAY_SIZE(sc->scaling_list_dc_coef_32x32); i++)
                *p++ = sc->scaling_list_dc_coef_32x32[i];

        /* 128-bit boundary */
        p += 8;

        /* write scaling lists column by column */

        for (i = 0; i < 6; i++)
                for (j = 0; j < 4; j++)
                        for (k = 0; k < 4; k++)
                                *p++ = sc->scaling_list_4x4[i][4 * k + j];

        for (i = 0; i < 6; i++)
                for (j = 0; j < 8; j++)
                        for (k = 0; k < 8; k++)
                                *p++ = sc->scaling_list_8x8[i][8 * k + j];

        for (i = 0; i < 6; i++)
                for (j = 0; j < 8; j++)
                        for (k = 0; k < 8; k++)
                                *p++ = sc->scaling_list_16x16[i][8 * k + j];

        for (i = 0; i < 2; i++)
                for (j = 0; j < 8; j++)
                        for (k = 0; k < 8; k++)
                                *p++ = sc->scaling_list_32x32[i][8 * k + j];

        hantro_write_addr(vpu, G2_HEVC_SCALING_LIST_ADDR, ctx->hevc_dec.scaling_lists.dma);
}

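/*
 * Entry point for one decode run: prepare the HEVC context, program
 * picture parameters, reference pictures and buffers, then start the
 * hardware.
 */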
int hantro_g2_hevc_dec_run(struct hantro_ctx *ctx)
{
        struct hantro_dev *vpu = ctx->dev;
        int ret;

        hantro_g2_check_idle(vpu);

        /* Prepare HEVC decoder context. */
        ret = hantro_hevc_dec_prepare_run(ctx);
        if (ret)
                return ret;

        /* Configure hardware registers. */
        set_params(ctx);

        /* set reference pictures */
        ret = set_ref(ctx);
        if (ret)
                return ret;

        set_buffers(ctx);
        prepare_tile_info_buffer(ctx);

        prepare_scaling_list_buffer(ctx);

        hantro_end_prepare_run(ctx);

        hantro_reg_write(vpu, &g2_mode, HEVC_DEC_MODE);
        hantro_reg_write(vpu, &g2_clk_gate_e, 1);

        /* Don't disable output */
        hantro_reg_write(vpu, &g2_out_dis, 0);

        /* Don't compress buffers */
        hantro_reg_write(vpu, &g2_ref_compress_bypass, 1);

        /* Bus width and max burst */
        hantro_reg_write(vpu, &g2_buswidth, BUS_WIDTH_128);
        hantro_reg_write(vpu, &g2_max_burst, 16);

        /* Swap */
        hantro_reg_write(vpu, &g2_strm_swap, 0xf);
        hantro_reg_write(vpu, &g2_dirmv_swap, 0xf);
        hantro_reg_write(vpu, &g2_compress_swap, 0xf);

        /* Start decoding! */
        vdpu_write(vpu, G2_REG_INTERRUPT_DEC_E, G2_REG_INTERRUPT);

        return 0;
}