xref: /OK3568_Linux_fs/external/mpp/mpp/hal/rkdec/vp9d/hal_vp9d_vdpu34x.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1 /*
2  * Copyright 2020 Rockchip Electronics Co. LTD
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define MODULE_TAG "hal_vp9d_vdpu34x"
18 
19 #include <stdio.h>
20 #include <string.h>
21 
22 #include "mpp_env.h"
23 #include "mpp_mem.h"
24 #include "mpp_common.h"
25 #include "mpp_device.h"
26 #include "mpp_hal.h"
27 
28 #include "hal_bufs.h"
29 #include "hal_vp9d_debug.h"
30 #include "hal_vp9d_com.h"
31 #include "hal_vp9d_vdpu34x.h"
32 #include "hal_vp9d_ctx.h"
33 #include "vdpu34x_vp9d.h"
34 #include "vp9d_syntax.h"
35 
36 #define HW_PROB 1
37 #define VP9_CONTEXT 4
38 #define VP9_CTU_SIZE 64
39 #define PROB_SIZE_ALIGN_TO_4K MPP_ALIGN(PROB_SIZE, SZ_4K)
40 #define COUNT_SIZE_ALIGN_TO_4K MPP_ALIGN(COUNT_SIZE, SZ_4K)
41 #define MAX_SEGMAP_SIZE_ALIGN_TO_4K MPP_ALIGN(MAX_SEGMAP_SIZE, SZ_4K)
42 
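/*
 * A single probe buffer holds the delta probability table at offset 0 and the
 * count data at a 4K-aligned offset (VDPU34X_OFFSET_COUNT) right behind it.
 */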
43 #define VDPU34X_OFFSET_COUNT (PROB_SIZE_ALIGN_TO_4K)
44 #define VDPU34X_PROBE_BUFFER_SIZE (PROB_SIZE_ALIGN_TO_4K + COUNT_SIZE_ALIGN_TO_4K)
45 
46 typedef struct Vdpu34xVp9dCtx_t {
47     Vp9dRegBuf      g_buf[MAX_GEN_REG];
48     MppBuffer       probe_base;
49     MppBuffer       seg_base;
50     RK_U32          offset_count;
51     RK_U32          offset_segid_cur;
52     RK_U32          offset_segid_last;
53     MppBuffer       prob_default_base;
54     void*           hw_regs;
55     RK_S32          mv_base_addr;
56     RK_S32          pre_mv_base_addr;
57     Vp9dLastInfo    ls_info;
58     /*
59      * swap between segid_cur_base & segid_last_base
60      * 0  used segid_cur_base as last
61  * 1  used segid_last_base as last
62      */
63     RK_U32          last_segid_flag;
64     RK_S32          width;
65     RK_S32          height;
66     /* rcb buffers info */
67     RK_S32          rcb_buf_size;
68     Vdpu34xRcbInfo  rcb_info[RCB_BUF_COUNT];
69     MppBuffer       rcb_buf;
70     RK_U32          num_row_tiles;
71     RK_U32          bit_depth;
72     /* colmv buffers info */
73     HalBufs         cmv_bufs;
74     RK_S32          mv_size;
75     RK_S32          mv_count;
76     RK_U32          prob_ctx_valid[VP9_CONTEXT];
77     MppBuffer       prob_loop_base[VP9_CONTEXT];
78     RK_U32          prob_ref_poc[VP9_CONTEXT];
79     RK_U32          col_ref_poc;
80     RK_U32          segid_ref_poc;
81 } Vdpu34xVp9dCtx;
82 
83 static MPP_RET hal_vp9d_alloc_res(HalVp9dCtx *hal)
84 {
85     RK_S32 i = 0;
86     RK_S32 ret = 0;
87     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
88     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
89     hw_ctx->offset_count = VDPU34X_OFFSET_COUNT;
90     hw_ctx->offset_segid_cur = 0;
91     hw_ctx->offset_segid_last = MAX_SEGMAP_SIZE_ALIGN_TO_4K;
92     /* alloc common buffer */
93     for (i = 0; i < VP9_CONTEXT; i++) {
94         ret = mpp_buffer_get(p_hal->group, &hw_ctx->prob_loop_base[i], PROB_SIZE);
95         if (ret) {
96             mpp_err("vp9 probe_loop_base get buffer failed\n");
97             return ret;
98         }
99     }
100     ret = mpp_buffer_get(p_hal->group, &hw_ctx->prob_default_base, PROB_SIZE);
101     if (ret) {
102         mpp_err("vp9 probe_default_base get buffer failed\n");
103         return ret;
104     }
105     /* alloc buffer for fast mode or normal */
106     if (p_hal->fast_mode) {
107         for (i = 0; i < MAX_GEN_REG; i++) {
108             hw_ctx->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(Vdpu34xVp9dRegSet));
109             ret = mpp_buffer_get(p_hal->group, &hw_ctx->g_buf[i].probe_base, VDPU34X_PROBE_BUFFER_SIZE);
110             if (ret) {
111                 mpp_err("vp9 probe_base get buffer failed\n");
112                 return ret;
113             }
114         }
115     } else {
116         hw_ctx->hw_regs = mpp_calloc_size(void, sizeof(Vdpu34xVp9dRegSet));
117         ret = mpp_buffer_get(p_hal->group, &hw_ctx->probe_base, VDPU34X_PROBE_BUFFER_SIZE);
118         if (ret) {
119             mpp_err("vp9 probe_base get buffer failed\n");
120             return ret;
121         }
122     }
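    /*
     * seg_base holds two segment-id maps back to back (current and last);
     * last_segid_flag selects which half is treated as the last map.
     */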
123     ret = mpp_buffer_get(p_hal->group, &hw_ctx->seg_base, MAX_SEGMAP_SIZE_ALIGN_TO_4K * 2);
124     if (ret) {
125         mpp_err("vp9 segid_base get buffer failed\n");
126         return ret;
127     }
128     return MPP_OK;
129 }
130 
131 static MPP_RET hal_vp9d_release_res(HalVp9dCtx *hal)
132 {
133     RK_S32 i = 0;
134     RK_S32 ret = 0;
135     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
136     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
137 
138     if (hw_ctx->prob_default_base) {
139         ret = mpp_buffer_put(hw_ctx->prob_default_base);
140         if (ret) {
141             mpp_err("vp9 probe_wr_base put buffer failed\n");
142             return ret;
143         }
144     }
145     for (i = 0; i < VP9_CONTEXT; i++) {
146         if (hw_ctx->prob_loop_base[i]) {
147             ret = mpp_buffer_put(hw_ctx->prob_loop_base[i]);
148             if (ret) {
149                 mpp_err("vp9 probe_base put buffer failed\n");
150                 return ret;
151             }
152         }
153     }
154     if (p_hal->fast_mode) {
155         for (i = 0; i < MAX_GEN_REG; i++) {
156             if (hw_ctx->g_buf[i].probe_base) {
157                 ret = mpp_buffer_put(hw_ctx->g_buf[i].probe_base);
158                 if (ret) {
159                     mpp_err("vp9 probe_base put buffer failed\n");
160                     return ret;
161                 }
162             }
163             if (hw_ctx->g_buf[i].hw_regs) {
164                 mpp_free(hw_ctx->g_buf[i].hw_regs);
165                 hw_ctx->g_buf[i].hw_regs = NULL;
166             }
167             if (hw_ctx->g_buf[i].rcb_buf) {
168                 ret = mpp_buffer_put(hw_ctx->g_buf[i].rcb_buf);
169                 if (ret) {
170                     mpp_err("vp9 rcb_buf[%d] put buffer failed\n", i);
171                     return ret;
172                 }
173             }
174         }
175     } else {
176         if (hw_ctx->probe_base) {
177             ret = mpp_buffer_put(hw_ctx->probe_base);
178             if (ret) {
179                 mpp_err("vp9 probe_base put buffer failed\n");
180                 return ret;
181             }
182         }
183 
184         if (hw_ctx->hw_regs) {
185             mpp_free(hw_ctx->hw_regs);
186             hw_ctx->hw_regs = NULL;
187         }
188         if (hw_ctx->rcb_buf) {
189             ret = mpp_buffer_put(hw_ctx->rcb_buf);
190             if (ret) {
191                 mpp_err("vp9 rcb_buf put buffer failed\n");
192                 return ret;
193             }
194         }
195     }
196 
197     if (hw_ctx->cmv_bufs) {
198         ret = hal_bufs_deinit(hw_ctx->cmv_bufs);
199         if (ret) {
200             mpp_err("vp9 cmv bufs deinit buffer failed\n");
201             return ret;
202         }
203     }
204 
205     if (hw_ctx->seg_base) {
206         ret = mpp_buffer_put(hw_ctx->seg_base);
207         if (ret) {
208             mpp_err("vp9 seg_base put buffer failed\n");
209             return ret;
210         }
211     }
212 
213     return MPP_OK;
214 }
215 
216 static MPP_RET hal_vp9d_vdpu34x_deinit(void *hal)
217 {
218     MPP_RET ret = MPP_OK;
219     HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
220 
221     hal_vp9d_release_res(p_hal);
222 
223     if (p_hal->group) {
224         ret = mpp_buffer_group_put(p_hal->group);
225         if (ret) {
226             mpp_err("vp9d group free buffer failed\n");
227             return ret;
228         }
229     }
230     MPP_FREE(p_hal->hw_ctx);
231     return MPP_OK;
232 }
233 
234 static MPP_RET hal_vp9d_vdpu34x_init(void *hal, MppHalCfg *cfg)
235 {
236     MPP_RET ret = MPP_OK;
237     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
238     MEM_CHECK(ret, p_hal->hw_ctx = mpp_calloc_size(void, sizeof(Vdpu34xVp9dCtx)));
239     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
240 
241     hw_ctx->mv_base_addr = -1;
242     hw_ctx->pre_mv_base_addr = -1;
243     mpp_slots_set_prop(p_hal->slots, SLOTS_HOR_ALIGN, vp9_hor_align);
244     mpp_slots_set_prop(p_hal->slots, SLOTS_VER_ALIGN, vp9_ver_align);
245 
246     if (p_hal->group == NULL) {
247         ret = mpp_buffer_group_get_internal(&p_hal->group, MPP_BUFFER_TYPE_ION);
248         if (ret) {
249             mpp_err("vp9 mpp_buffer_group_get failed\n");
250             goto __FAILED;
251         }
252     }
253 
254     ret = hal_vp9d_alloc_res(p_hal);
255     if (ret) {
256         mpp_err("hal_vp9d_alloc_res failed\n");
257         goto __FAILED;
258     }
259 
260     hw_ctx->last_segid_flag = 1;
261     {
262         // report hw_info to parser
263         const MppSocInfo *info = mpp_get_soc_info();
264         const void *hw_info = NULL;
265         RK_U32 i;
266 
267         for (i = 0; i < MPP_ARRAY_ELEMS(info->dec_caps); i++) {
268             if (info->dec_caps[i] && info->dec_caps[i]->type == VPU_CLIENT_RKVDEC) {
269                 hw_info = info->dec_caps[i];
270                 break;
271             }
272         }
273 
274         mpp_assert(hw_info);
275         cfg->hw_info = hw_info;
276     }
277 
278     return ret;
279 __FAILED:
280     hal_vp9d_vdpu34x_deinit(hal);
281     return ret;
282 }
283 
284 static void vp9d_refine_rcb_size(Vdpu34xRcbInfo *rcb_info,
285                                  Vdpu34xVp9dRegSet *vp9_hw_regs,
286                                  RK_S32 width, RK_S32 height, void* data)
287 {
288     RK_U32 rcb_bits = 0;
289     DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)data;
290     RK_U32 num_tiles = pic_param->log2_tile_rows;
291     RK_U32 bit_depth = pic_param->BitDepthMinus8Luma + 8;
292     RK_U32 ext_align_size = num_tiles * 64 * 8;
293 
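    /*
     * Each size below is computed in bits and converted by MPP_RCB_BYTES;
     * ext_align_size adds per-tile-row padding (likely 64 bytes per tile row).
     */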
294     width = MPP_ALIGN(width, VP9_CTU_SIZE);
295     height = MPP_ALIGN(height, VP9_CTU_SIZE);
296     /* RCB_STRMD_ROW */
297     if (width > 4096)
298         rcb_bits = MPP_ALIGN(width, 64) * 232 + ext_align_size;
299     else
300         rcb_bits = 0;
301     rcb_info[RCB_STRMD_ROW].size = MPP_RCB_BYTES(rcb_bits);
302     /* RCB_TRANSD_ROW */
303     if (width > 8192)
304         rcb_bits = (MPP_ALIGN(width - 8192, 4) << 1) + ext_align_size;
305     else
306         rcb_bits = 0;
307     rcb_info[RCB_TRANSD_ROW].size = MPP_RCB_BYTES(rcb_bits);
308     /* RCB_TRANSD_COL */
309     if (height > 8192)
310         rcb_bits = (MPP_ALIGN(height - 8192, 4) << 1) + ext_align_size;
311     else
312         rcb_bits = 0;
313     rcb_info[RCB_TRANSD_COL].size = MPP_RCB_BYTES(rcb_bits);
314     /* RCB_INTER_ROW */
315     rcb_bits = width * 36 + ext_align_size;
316     rcb_info[RCB_INTER_ROW].size = MPP_RCB_BYTES(rcb_bits);
317     /* RCB_INTER_COL */
318     rcb_info[RCB_INTER_COL].size = 0;
319     /* RCB_INTRA_ROW */
320     rcb_bits = width * 48 + ext_align_size;
321     rcb_info[RCB_INTRA_ROW].size = MPP_RCB_BYTES(rcb_bits);
322     /* RCB_DBLK_ROW */
323     rcb_bits = width * (1 + 16 * bit_depth) + num_tiles * 192 * bit_depth + ext_align_size;
324     rcb_info[RCB_DBLK_ROW].size = MPP_RCB_BYTES(rcb_bits);
325     /* RCB_SAO_ROW */
326     rcb_info[RCB_SAO_ROW].size = 0;
327     /* RCB_FBC_ROW */
328     if (vp9_hw_regs->common.reg012.fbc_e) {
329         rcb_bits = 8 * width * bit_depth + ext_align_size;
330     } else
331         rcb_bits = 0;
332     rcb_info[RCB_FBC_ROW].size = MPP_RCB_BYTES(rcb_bits);
333     /* RCB_FILT_COL */
334     if (vp9_hw_regs->common.reg012.fbc_e) {
335         rcb_bits = height * (4 + 24 *  bit_depth);
336     } else
337         rcb_bits = height * (4 + 16 *  bit_depth);
338     rcb_bits += ext_align_size;
339     rcb_info[RCB_FILT_COL].size = MPP_RCB_BYTES(rcb_bits);
340 }
341 
342 static void hal_vp9d_rcb_info_update(void *hal,  Vdpu34xVp9dRegSet *hw_regs, void *data)
343 {
344     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
345     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
346     DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)data;
347     RK_U32 num_tiles = pic_param->log2_tile_rows;
348     RK_U32 bit_depth = pic_param->BitDepthMinus8Luma + 8;
349     RK_S32 height = vp9_ver_align(pic_param->height);
350     RK_S32 width  = vp9_ver_align(pic_param->width);
351 
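    /* reallocate rcb buffers only when tile rows, bit depth or frame size changed */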
352     if (hw_ctx->num_row_tiles != num_tiles ||
353         hw_ctx->bit_depth != bit_depth ||
354         hw_ctx->width != width ||
355         hw_ctx->height != height) {
356 
357         hw_ctx->rcb_buf_size = vdpu34x_get_rcb_buf_size(hw_ctx->rcb_info, width, height);
358         vp9d_refine_rcb_size(hw_ctx->rcb_info, hw_regs, width, height, pic_param);
359 
360         if (p_hal->fast_mode) {
361             RK_U32 i;
362 
363             for (i = 0; i < MPP_ARRAY_ELEMS(hw_ctx->g_buf); i++) {
364                 MppBuffer rcb_buf = hw_ctx->g_buf[i].rcb_buf;
365 
366                 if (rcb_buf) {
367                     mpp_buffer_put(rcb_buf);
368                     hw_ctx->g_buf[i].rcb_buf = NULL;
369                 }
370                 mpp_buffer_get(p_hal->group, &rcb_buf, hw_ctx->rcb_buf_size);
371                 hw_ctx->g_buf[i].rcb_buf = rcb_buf;
372             }
373         } else {
374             MppBuffer rcb_buf = hw_ctx->rcb_buf;
375 
376             if (rcb_buf) {
377                 mpp_buffer_put(rcb_buf);
378                 rcb_buf = NULL;
379             }
380             mpp_buffer_get(p_hal->group, &rcb_buf, hw_ctx->rcb_buf_size);
381             hw_ctx->rcb_buf = rcb_buf;
382         }
383 
384         hw_ctx->num_row_tiles  = num_tiles;
385         hw_ctx->bit_depth      = bit_depth;
386         hw_ctx->width          = width;
387         hw_ctx->height         = height;
388     }
389 }
390 
391 static MPP_RET hal_vp9d_vdpu34x_gen_regs(void *hal, HalTaskInfo *task)
392 {
393     RK_S32   i;
394     RK_U8    bit_depth = 0;
395     RK_U32   pic_h[3] = { 0 };
396     RK_U32   ref_frame_width_y;
397     RK_U32   ref_frame_height_y;
398     RK_S32   stream_len = 0, aglin_offset = 0;
399     RK_U32   y_hor_virstride, uv_hor_virstride, y_virstride;
400     RK_U8   *bitstream = NULL;
401     MppBuffer streambuf = NULL;
402     RK_U32 sw_y_hor_virstride;
403     RK_U32 sw_uv_hor_virstride;
404     RK_U32 sw_y_virstride;
405     RK_U8  ref_idx = 0;
406     RK_U8  ref_frame_idx = 0;
407     RK_U32 *reg_ref_base = 0;
408     RK_S32 intraFlag = 0;
409     MppBuffer framebuf = NULL;
410     HalBuf *mv_buf = NULL;
411     RK_U32 fbc_en = 0;
412 
413     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
414     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
415     DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)task->dec.syntax.data;
416     RK_S32 mv_size = pic_param->width * pic_param->height / 2;
417     RK_U32 frame_ctx_id = pic_param->frame_context_idx;
418 
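    /* fast mode: take a free per-task register/probe buffer slot so several tasks can be prepared in flight */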
419     if (p_hal->fast_mode) {
420         for (i = 0; i < MAX_GEN_REG; i++) {
421             if (!hw_ctx->g_buf[i].use_flag) {
422                 task->dec.reg_index = i;
423                 hw_ctx->probe_base = hw_ctx->g_buf[i].probe_base;
424 
425                 hw_ctx->hw_regs = hw_ctx->g_buf[i].hw_regs;
426                 hw_ctx->g_buf[i].use_flag = 1;
427                 break;
428             }
429         }
430         if (i == MAX_GEN_REG) {
431             mpp_err("vp9 fast mode buf all used\n");
432             return MPP_ERR_NOMEM;
433         }
434     }
435 
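    /* (re)build the colmv buffer pool when none exists yet or the picture outgrew the current mv buffer size */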
436     if (hw_ctx->cmv_bufs == NULL || hw_ctx->mv_size < mv_size) {
437         size_t size = mv_size;
438 
439         if (hw_ctx->cmv_bufs) {
440             hal_bufs_deinit(hw_ctx->cmv_bufs);
441             hw_ctx->cmv_bufs = NULL;
442         }
443 
444         hal_bufs_init(&hw_ctx->cmv_bufs);
445         if (hw_ctx->cmv_bufs == NULL) {
446             mpp_err_f("colmv bufs init fail");
447             return MPP_NOK;
448         }
449         hw_ctx->mv_size = mv_size;
450         hw_ctx->mv_count = mpp_buf_slot_get_count(p_hal ->slots);
451         hal_bufs_setup(hw_ctx->cmv_bufs, hw_ctx->mv_count, 1, &size);
452     }
453 
454     Vdpu34xVp9dRegSet *vp9_hw_regs = (Vdpu34xVp9dRegSet*)hw_ctx->hw_regs;
455     intraFlag = (!pic_param->frame_type || pic_param->intra_only);
456     stream_len = (RK_S32)mpp_packet_get_length(task->dec.input_packet);
457     memset(hw_ctx->hw_regs, 0, sizeof(Vdpu34xVp9dRegSet));
458 #if HW_PROB
459     hal_vp9d_prob_flag_delta(mpp_buffer_get_ptr(hw_ctx->probe_base), task->dec.syntax.data);
460     if (intraFlag)
461         hal_vp9d_prob_default(mpp_buffer_get_ptr(hw_ctx->prob_default_base), task->dec.syntax.data);
462 
463     /* config reg103 */
464     vp9_hw_regs->vp9d_param.reg103.prob_update_en   = 1;
465     vp9_hw_regs->vp9d_param.reg103.intra_only_flag  = intraFlag;
466     if (!intraFlag) {
467         vp9_hw_regs->vp9d_param.reg103.txfmmode_rfsh_en = (pic_param->txmode == 4) ? 1 : 0;
468         vp9_hw_regs->vp9d_param.reg103.interp_filter_switch_en = pic_param->interp_filter == 4 ? 1 : 0;
469     }
470     vp9_hw_regs->vp9d_param.reg103.ref_mode_rfsh_en     = 1;
471     vp9_hw_regs->vp9d_param.reg103.single_ref_rfsh_en   = 1;
472     vp9_hw_regs->vp9d_param.reg103.comp_ref_rfsh_en     = 1;
473     vp9_hw_regs->vp9d_param.reg103.inter_coef_rfsh_flag = 0;
474     vp9_hw_regs->vp9d_param.reg103.refresh_en           =
475         !pic_param->error_resilient_mode && !pic_param->parallelmode;
476     vp9_hw_regs->vp9d_param.reg103.prob_save_en             = pic_param->refresh_frame_context;
477     vp9_hw_regs->vp9d_param.reg103.allow_high_precision_mv  = pic_param->allow_high_precision_mv;
478     vp9_hw_regs->vp9d_param.reg103.last_key_frame_flag      = hw_ctx->ls_info.last_intra_only;
479 
480     /* set info for multi core */
481     {
482         MppFrame mframe = NULL;
483 
484         mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
485         vp9_hw_regs->vp9d_param.reg65.cur_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
486         // last poc
487         ref_idx = pic_param->frame_refs[0].Index7Bits;
488         ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
489         if (ref_frame_idx < 0x7f) {
490             mframe = NULL;
491             mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &mframe);
492             vp9_hw_regs->vp9d_param.reg95.last_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
493         }
494         // golden poc
495         ref_idx = pic_param->frame_refs[1].Index7Bits;
496         ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
497         if (ref_frame_idx < 0x7f) {
498             mframe = NULL;
499             mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &mframe);
500             vp9_hw_regs->vp9d_param.reg96.golden_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
501         }
502         // altref poc
503         ref_idx = pic_param->frame_refs[2].Index7Bits;
504         ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
505         if (ref_frame_idx < 0x7f) {
506             mframe = NULL;
507             mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &mframe);
508             vp9_hw_regs->vp9d_param.reg97.altref_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
509         }
510         // colref poc
511         vp9_hw_regs->vp9d_param.reg98.col_ref_poc =
512             hw_ctx->col_ref_poc ? hw_ctx->col_ref_poc : vp9_hw_regs->vp9d_param.reg65.cur_poc;
513         if (pic_param->show_frame && !pic_param->show_existing_frame)
514             hw_ctx->col_ref_poc = vp9_hw_regs->vp9d_param.reg65.cur_poc;
515         // segment id ref poc
516         vp9_hw_regs->vp9d_param.reg100.segid_ref_poc = hw_ctx->segid_ref_poc;
517 
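        /* both segid bases point into seg_base; last_segid_flag swaps which offset acts as the last map */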
518         vp9_hw_regs->vp9d_addr.reg169_segidcur_base = mpp_buffer_get_fd(hw_ctx->seg_base);
519         vp9_hw_regs->vp9d_addr.reg168_segidlast_base = mpp_buffer_get_fd(hw_ctx->seg_base);
520         if (hw_ctx->last_segid_flag) {
521             mpp_dev_set_reg_offset(p_hal->dev, 168, hw_ctx->offset_segid_last);
522             mpp_dev_set_reg_offset(p_hal->dev, 169, hw_ctx->offset_segid_cur);
523         } else {
524             mpp_dev_set_reg_offset(p_hal->dev, 168, hw_ctx->offset_segid_cur);
525             mpp_dev_set_reg_offset(p_hal->dev, 169, hw_ctx->offset_segid_last);
526         }
527 
528         if ((pic_param->stVP9Segments.enabled && pic_param->stVP9Segments.update_map) ||
529             (hw_ctx->ls_info.last_width != pic_param->width) ||
530             (hw_ctx->ls_info.last_height != pic_param->height) ||
531             intraFlag || pic_param->error_resilient_mode) {
532             hw_ctx->segid_ref_poc = vp9_hw_regs->vp9d_param.reg65.cur_poc;
533             hw_ctx->last_segid_flag = !hw_ctx->last_segid_flag;
534             vp9_hw_regs->vp9d_param.reg100.segid_ref_poc = 0;
535             vp9_hw_regs->vp9d_param.reg75.vp9_segment_id_update = 1;
536         } else
537             vp9_hw_regs->vp9d_param.reg75.vp9_segment_id_update = 0;
538     }
539 
540     /* config last prob base and update write base */
541     {
542 
543         if (intraFlag || pic_param->error_resilient_mode) {
544             if (intraFlag
545                 || pic_param->error_resilient_mode
546                 || (pic_param->reset_frame_context == 3)) {
547                 memset(hw_ctx->prob_ctx_valid, 0, sizeof(hw_ctx->prob_ctx_valid));
548             } else if (pic_param->reset_frame_context == 2) {
549                 hw_ctx->prob_ctx_valid[frame_ctx_id] = 0;
550             }
551         }
552 
553 #if VP9_DUMP
554         {
555             static RK_U32 file_cnt = 0;
556             char file_name[128];
557             RK_U32 i = 0;
558             sprintf(file_name, "/data/vp9/prob_last_%d.txt", file_cnt);
559             FILE *fp = fopen(file_name, "wb");
560             RK_U32 *tmp = NULL;
561             if (hw_ctx->prob_ctx_valid[frame_ctx_id]) {
562                 tmp = (RK_U32 *)mpp_buffer_get_ptr(hw_ctx->prob_loop_base[pic_param->frame_context_idx]);
563             } else {
564                 tmp = (RK_U32 *)mpp_buffer_get_ptr(hw_ctx->prob_default_base);
565             }
566             for (i = 0; i < PROB_SIZE / 4; i += 2) {
567                 fprintf(fp, "%08x%08x\n", tmp[i + 1], tmp[i]);
568             }
569             file_cnt++;
570             fflush(fp);
571             fclose(fp);
572         }
573 #endif
574 
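        /* pick the last probability source: a previously saved per-context buffer if valid, otherwise the default table */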
575         if (hw_ctx->prob_ctx_valid[frame_ctx_id]) {
576             vp9_hw_regs->vp9d_addr.reg162_last_prob_base =
577                 mpp_buffer_get_fd(hw_ctx->prob_loop_base[frame_ctx_id]);
578             vp9_hw_regs->common.reg028.swreg_vp9_rd_prob_idx = frame_ctx_id + 1;
579             vp9_hw_regs->vp9d_param.reg99.prob_ref_poc = hw_ctx->prob_ref_poc[frame_ctx_id];
580         } else {
581             vp9_hw_regs->vp9d_addr.reg162_last_prob_base = mpp_buffer_get_fd(hw_ctx->prob_default_base);
582             hw_ctx->prob_ctx_valid[frame_ctx_id] |= pic_param->refresh_frame_context;
583             vp9_hw_regs->common.reg028.swreg_vp9_rd_prob_idx = 0;
584             vp9_hw_regs->vp9d_param.reg99.prob_ref_poc = 0;
585             hw_ctx->prob_ref_poc[frame_ctx_id] = vp9_hw_regs->vp9d_param.reg65.cur_poc;
586         }
587         hal_vp9d_dbg_par("vp9d intra %d parallelmode %d frame_ctx_id %d refresh %d err %d\n",
588                          intraFlag, pic_param->parallelmode, frame_ctx_id,
589                          pic_param->refresh_frame_context, pic_param->error_resilient_mode);
590         if (!pic_param->parallelmode)
591             hw_ctx->prob_ref_poc[frame_ctx_id] = vp9_hw_regs->vp9d_param.reg65.cur_poc;
592         vp9_hw_regs->vp9d_addr.reg172_update_prob_wr_base =
593             mpp_buffer_get_fd(hw_ctx->prob_loop_base[frame_ctx_id]);
594         vp9_hw_regs->common.reg028.swreg_vp9_wr_prob_idx = frame_ctx_id + 1;
595 
596     }
597     vp9_hw_regs->vp9d_addr.reg160_delta_prob_base = mpp_buffer_get_fd(hw_ctx->probe_base);
598 #else
599     hal_vp9d_output_probe(mpp_buffer_get_ptr(hw_ctx->probe_base), task->dec.syntax.data);
600 #endif
601     vp9_hw_regs->common.reg013.cur_pic_is_idr = !pic_param->frame_type;
602     vp9_hw_regs->common.reg009.dec_mode = 2; //set as vp9 dec
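    /* stream length rounded up to 16 bytes plus 0x80 extra (likely a hw read-ahead guard); the padding is zeroed below */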
603     vp9_hw_regs->common.reg016_str_len = ((stream_len + 15) & (~15)) + 0x80;
604 
605     mpp_buf_slot_get_prop(p_hal ->packet_slots, task->dec.input, SLOT_BUFFER, &streambuf);
606     bitstream = mpp_buffer_get_ptr(streambuf);
607     aglin_offset = vp9_hw_regs->common.reg016_str_len - stream_len;
608     if (aglin_offset > 0) {
609         memset((void *)(bitstream + stream_len), 0, aglin_offset);
610     }
611 
612     //--- calculate the yuv_frame_size and mv_size
613     bit_depth = pic_param->BitDepthMinus8Luma + 8;
614     pic_h[0] = vp9_ver_align(pic_param->height);
615     pic_h[1] = vp9_ver_align(pic_param->height) / 2;
616     pic_h[2] = pic_h[1];
617 
618     {
619         MppFrame mframe = NULL;
620 
621         mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
622         fbc_en = MPP_FRAME_FMT_IS_FBC(mpp_frame_get_fmt(mframe));
623 
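        /* AFBC output: virtual strides come from the fbc header stride; the payload presumably starts after the 4K-aligned header section */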
624         if (fbc_en) {
625             RK_U32 fbc_hdr_stride = mpp_frame_get_fbc_hdr_stride(mframe);
626             RK_U32 h = MPP_ALIGN(mpp_frame_get_height(mframe), 64);
627             RK_U32 fbd_offset = MPP_ALIGN(fbc_hdr_stride * (h + 16) / 16, SZ_4K);
628 
629             vp9_hw_regs->common.reg012.fbc_e = 1;
630             vp9_hw_regs->common.reg018.y_hor_virstride = fbc_hdr_stride >> 4;
631             vp9_hw_regs->common.reg019.uv_hor_virstride = fbc_hdr_stride >> 4;
632             vp9_hw_regs->common.reg020_fbc_payload_off.payload_st_offset = fbd_offset >> 4;
633         } else {
634             sw_y_hor_virstride = (vp9_hor_align((pic_param->width * bit_depth) >> 3) >> 4);
635             sw_uv_hor_virstride = (vp9_hor_align((pic_param->width * bit_depth) >> 3) >> 4);
636             sw_y_virstride = pic_h[0] * sw_y_hor_virstride;
637 
638             vp9_hw_regs->common.reg012.fbc_e = 0;
639             vp9_hw_regs->common.reg018.y_hor_virstride = sw_y_hor_virstride;
640             vp9_hw_regs->common.reg019.uv_hor_virstride = sw_uv_hor_virstride;
641             vp9_hw_regs->common.reg020_y_virstride.y_virstride = sw_y_virstride;
642         }
643     }
644     if (!pic_param->intra_only && pic_param->frame_type &&
645         !pic_param->error_resilient_mode && hw_ctx->ls_info.last_show_frame) {
646         hw_ctx->pre_mv_base_addr = hw_ctx->mv_base_addr;
647     }
648 
649     mpp_buf_slot_get_prop(p_hal ->slots, task->dec.output, SLOT_BUFFER, &framebuf);
650     vp9_hw_regs->common_addr.reg130_decout_base =  mpp_buffer_get_fd(framebuf);
651     vp9_hw_regs->common_addr.reg128_rlc_base = mpp_buffer_get_fd(streambuf);
652     vp9_hw_regs->common_addr.reg129_rlcwrite_base = mpp_buffer_get_fd(streambuf);
653 
654     vp9_hw_regs->vp9d_addr.reg167_count_prob_base = mpp_buffer_get_fd(hw_ctx->probe_base);
655     mpp_dev_set_reg_offset(p_hal->dev, 167, hw_ctx->offset_count);
656 
657     //set cur colmv base
658     mv_buf = hal_bufs_get_buf(hw_ctx->cmv_bufs, task->dec.output);
659     vp9_hw_regs->common_addr.reg131_colmv_cur_base = mpp_buffer_get_fd(mv_buf->buf[0]);
660     hw_ctx->mv_base_addr = vp9_hw_regs->common_addr.reg131_colmv_cur_base;
661     if (hw_ctx->pre_mv_base_addr < 0) {
662         hw_ctx->pre_mv_base_addr = hw_ctx->mv_base_addr;
663     }
664     vp9_hw_regs->vp9d_addr.reg170_ref_colmv_base = hw_ctx->pre_mv_base_addr;
665 
666     vp9_hw_regs->vp9d_param.reg64.cprheader_offset = 0;
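    /* the last/golden/altref base address registers are laid out consecutively, so they are indexed through reg_ref_base[i] below */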
667     reg_ref_base = (RK_U32*)&vp9_hw_regs->vp9d_addr.reg164_ref_last_base;
668     for (i = 0; i < 3; i++) {
669         MppFrame frame = NULL;
670 
671         ref_idx = pic_param->frame_refs[i].Index7Bits;
672         ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
673         ref_frame_width_y = pic_param->ref_frame_coded_width[ref_idx];
674         ref_frame_height_y = pic_param->ref_frame_coded_height[ref_idx];
675         pic_h[0] = vp9_ver_align(ref_frame_height_y);
676         pic_h[1] = vp9_ver_align(ref_frame_height_y) / 2;
677 
678         if (ref_frame_idx < 0x7f)
679             mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &frame);
680 
681         if (fbc_en && frame) {
682             RK_U32 fbc_hdr_stride = mpp_frame_get_fbc_hdr_stride(frame);
683             RK_U32 h = MPP_ALIGN(mpp_frame_get_height(frame), 64);
684             RK_U32 fbd_offset = MPP_ALIGN(fbc_hdr_stride * (h + 16) / 16, SZ_4K);
685 
686             y_hor_virstride = uv_hor_virstride = fbc_hdr_stride >> 4;
687             y_virstride = fbd_offset;
688         } else {
689             y_hor_virstride = uv_hor_virstride = (vp9_hor_align((ref_frame_width_y * bit_depth) >> 3) >> 4);
690             y_virstride = y_hor_virstride * pic_h[0];
691         }
692 
693         if (pic_param->ref_frame_map[ref_idx].Index7Bits < 0x7f) {
694             mpp_buf_slot_get_prop(p_hal ->slots, pic_param->ref_frame_map[ref_idx].Index7Bits, SLOT_BUFFER, &framebuf);
695         }
696 
697         if (pic_param->ref_frame_map[ref_idx].Index7Bits < 0x7f) {
698             switch (i) {
699             case 0: {
700                 vp9_hw_regs->vp9d_param.reg106.framewidth_last = ref_frame_width_y;
701                 vp9_hw_regs->vp9d_param.reg107.frameheight_last = ref_frame_height_y;
702                 vp9_hw_regs->vp9d_param.reg79.lastfy_hor_virstride = y_hor_virstride;
703                 vp9_hw_regs->vp9d_param.reg80.lastfuv_hor_virstride = uv_hor_virstride;
704                 vp9_hw_regs->vp9d_param.reg85.lastfy_virstride = y_virstride;
705             } break;
706             case 1: {
707                 vp9_hw_regs->vp9d_param.reg108.framewidth_golden = ref_frame_width_y;
708                 vp9_hw_regs->vp9d_param.reg109.frameheight_golden = ref_frame_height_y;
709                 vp9_hw_regs->vp9d_param.reg81.goldenfy_hor_virstride = y_hor_virstride;
710                 vp9_hw_regs->vp9d_param.reg82.goldenfuv_hor_virstride = uv_hor_virstride;
711                 vp9_hw_regs->vp9d_param.reg86.goldeny_virstride = y_virstride;
712             } break;
713             case 2: {
714                 vp9_hw_regs->vp9d_param.reg110.framewidth_alfter = ref_frame_width_y;
715                 vp9_hw_regs->vp9d_param.reg111.frameheight_alfter = ref_frame_height_y;
716                 vp9_hw_regs->vp9d_param.reg83.altreffy_hor_virstride = y_hor_virstride;
717                 vp9_hw_regs->vp9d_param.reg84.altreffuv_hor_virstride = uv_hor_virstride;
718                 vp9_hw_regs->vp9d_param.reg87.altrefy_virstride = y_virstride;
719             } break;
720             default:
721                 break;
722             }
723 
724             /*0 map to 11*/
725             /*1 map to 12*/
726             /*2 map to 13*/
727             if (framebuf != NULL) {
728                 reg_ref_base[i] = mpp_buffer_get_fd(framebuf);
729             } else {
730                 mpp_log("ref buffer address is not valid, falling back to decout base, slot index 0x%x", pic_param->ref_frame_map[ref_idx].Index7Bits);
731                 reg_ref_base[i] = vp9_hw_regs->common_addr.reg130_decout_base;
732             }
733             mv_buf = hal_bufs_get_buf(hw_ctx->cmv_bufs, pic_param->ref_frame_map[ref_idx].Index7Bits);
734             vp9_hw_regs->vp9d_addr.reg181_196_ref_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]);
735         } else {
736             reg_ref_base[i] = vp9_hw_regs->common_addr.reg130_decout_base;
737             vp9_hw_regs->vp9d_addr.reg181_196_ref_colmv_base[i] = vp9_hw_regs->common_addr.reg131_colmv_cur_base;
738         }
739     }
740 
741     for (i = 0; i < 8; i++) {
742         vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_qp_delta_en         = (hw_ctx->ls_info.feature_mask[i]) & 0x1;
743         vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_qp_delta            = hw_ctx->ls_info.feature_data[i][0];
744         vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_loopfitler_value_en = (hw_ctx->ls_info.feature_mask[i] >> 1) & 0x1;
745         vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_loopfilter_value    = hw_ctx->ls_info.feature_data[i][1];
746         vp9_hw_regs->vp9d_param.reg67_74[i].segid_referinfo_en              = (hw_ctx->ls_info.feature_mask[i] >> 2) & 0x1;
747         vp9_hw_regs->vp9d_param.reg67_74[i].segid_referinfo                 = hw_ctx->ls_info.feature_data[i][2];
748         vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_skip_en             = (hw_ctx->ls_info.feature_mask[i] >> 3) & 0x1;
749     }
750 
751     vp9_hw_regs->vp9d_param.reg67_74[0].segid_abs_delta = hw_ctx->ls_info.abs_delta_last;
752     vp9_hw_regs->vp9d_param.reg76.tx_mode               = pic_param->txmode;
753     vp9_hw_regs->vp9d_param.reg76.frame_reference_mode  = pic_param->refmode;
754     vp9_hw_regs->vp9d_param.reg94.ref_deltas_lastframe  = 0;
755 
756     if (!intraFlag) {
757         for (i = 0; i < 4; i++)
758             vp9_hw_regs->vp9d_param.reg94.ref_deltas_lastframe   |= (hw_ctx->ls_info.last_ref_deltas[i] & 0x7f) << (7 * i);
759 
760         for (i = 0; i < 2; i++)
761             vp9_hw_regs->vp9d_param.reg75.mode_deltas_lastframe  |= (hw_ctx->ls_info.last_mode_deltas[i] & 0x7f) << (7 * i);
762     } else {
763         hw_ctx->ls_info.segmentation_enable_flag_last = 0;
764         hw_ctx->ls_info.last_intra_only = 1;
765     }
766 
767     vp9_hw_regs->vp9d_param.reg75.segmentation_enable_lstframe     = hw_ctx->ls_info.segmentation_enable_flag_last;
768     vp9_hw_regs->vp9d_param.reg75.last_show_frame                  = hw_ctx->ls_info.last_show_frame;
769     vp9_hw_regs->vp9d_param.reg75.last_intra_only                  = hw_ctx->ls_info.last_intra_only;
770     vp9_hw_regs->vp9d_param.reg75.last_widthheight_eqcur           = (pic_param->width == hw_ctx->ls_info.last_width) && (pic_param->height == hw_ctx->ls_info.last_height);
771     vp9_hw_regs->vp9d_param.reg78.lasttile_size                    = stream_len - pic_param->first_partition_size;
772 
773 
774     if (!intraFlag) {
775         vp9_hw_regs->vp9d_param.reg88.lref_hor_scale = pic_param->mvscale[0][0];
776         vp9_hw_regs->vp9d_param.reg89.lref_ver_scale = pic_param->mvscale[0][1];
777         vp9_hw_regs->vp9d_param.reg90.gref_hor_scale = pic_param->mvscale[1][0];
778         vp9_hw_regs->vp9d_param.reg91.gref_ver_scale = pic_param->mvscale[1][1];
779         vp9_hw_regs->vp9d_param.reg92.aref_hor_scale = pic_param->mvscale[2][0];
780         vp9_hw_regs->vp9d_param.reg93.aref_ver_scale = pic_param->mvscale[2][1];
781     }
782 
783     vp9_hw_regs->common.reg010.dec_e            = 1;
784     vp9_hw_regs->common.reg011.dec_timeout_e    = 1;
785     vp9_hw_regs->common.reg011.buf_empty_en     = 1;
786     vp9_hw_regs->common.reg011.dec_clkgate_e    = 1;
787     vp9_hw_regs->common.reg011.dec_e_strmd_clkgate_dis = 0;
788 
789     vp9_hw_regs->common.reg012.wait_reset_en    = 1;
790     vp9_hw_regs->common.reg013.timeout_mode     = 1;
791 
792     vp9_hw_regs->common.reg026.swreg_block_gating_e =
793         (mpp_get_soc_type() == ROCKCHIP_SOC_RK3588) ? 0xfffef : 0xfffff;
794     vp9_hw_regs->common.reg026.reg_cfg_gating_en = 1;
795     vp9_hw_regs->common.reg032_timeout_threshold = 0x3ffff;
796 
797     //last info  update
798     hw_ctx->ls_info.abs_delta_last = pic_param->stVP9Segments.abs_delta;
799     for (i = 0 ; i < 4; i ++) {
800         hw_ctx->ls_info.last_ref_deltas[i] = pic_param->ref_deltas[i];
801     }
802 
803     for (i = 0 ; i < 2; i ++) {
804         hw_ctx->ls_info.last_mode_deltas[i] = pic_param->mode_deltas[i];
805     }
806 
807     for (i = 0; i < 8; i++) {
808         hw_ctx->ls_info.feature_data[i][0] = pic_param->stVP9Segments.feature_data[i][0];
809         hw_ctx->ls_info.feature_data[i][1] = pic_param->stVP9Segments.feature_data[i][1];
810         hw_ctx->ls_info.feature_data[i][2] = pic_param->stVP9Segments.feature_data[i][2];
811         hw_ctx->ls_info.feature_data[i][3] = pic_param->stVP9Segments.feature_data[i][3];
812         hw_ctx->ls_info.feature_mask[i]  = pic_param->stVP9Segments.feature_mask[i];
813     }
814     if (!hw_ctx->ls_info.segmentation_enable_flag_last)
815         hw_ctx->ls_info.segmentation_enable_flag_last = pic_param->stVP9Segments.enabled;
816 
817     hw_ctx->ls_info.last_show_frame = pic_param->show_frame;
818     hw_ctx->ls_info.last_width = pic_param->width;
819     hw_ctx->ls_info.last_height = pic_param->height;
820     hw_ctx->ls_info.last_intra_only = (!pic_param->frame_type || pic_param->intra_only);
821     hal_vp9d_dbg_par("stVP9Segments.enabled %d show_frame %d  width %d  height %d last_intra_only %d",
822                      pic_param->stVP9Segments.enabled, pic_param->show_frame,
823                      pic_param->width, pic_param->height,
824                      hw_ctx->ls_info.last_intra_only);
825 
826     hal_vp9d_rcb_info_update(hal, vp9_hw_regs, pic_param);
827     {
828         MppBuffer rcb_buf = NULL;
829 
830         rcb_buf = p_hal->fast_mode ? hw_ctx->g_buf[task->dec.reg_index].rcb_buf : hw_ctx->rcb_buf;
831         vdpu34x_setup_rcb(&vp9_hw_regs->common_addr, p_hal->dev, rcb_buf, hw_ctx->rcb_info);
832     }
833     vdpu34x_setup_statistic(&vp9_hw_regs->common, &vp9_hw_regs->statistic);
834 
835     // check whether the counts need to be updated
836     if (pic_param->refresh_frame_context && !pic_param->parallelmode) {
837         task->dec.flags.wait_done = 1;
838     }
839 
840     return MPP_OK;
841 }
842 
843 static MPP_RET hal_vp9d_vdpu34x_start(void *hal, HalTaskInfo *task)
844 {
845     MPP_RET ret = MPP_OK;
846     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
847     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
848     Vdpu34xVp9dRegSet *hw_regs = (Vdpu34xVp9dRegSet *)hw_ctx->hw_regs;
849     MppDev dev = p_hal->dev;
850 
851     if (p_hal->fast_mode) {
852         RK_S32 index =  task->dec.reg_index;
853         hw_regs = (Vdpu34xVp9dRegSet *)hw_ctx->g_buf[index].hw_regs;
854     }
855 
856     mpp_assert(hw_regs);
857 
858 
859 #if VP9_DUMP
860     {
861         static RK_U32 file_cnt = 0;
862         char file_name[128];
863         sprintf(file_name, "/data/vp9_regs/reg_%d.txt", file_cnt);
864         FILE *fp = fopen(file_name, "wb");
865         RK_U32 i = 0;
866         RK_U32 *tmp = NULL;
867         tmp = (RK_U32 *)&hw_regs->common;
868         for (i = 0; i < sizeof(hw_regs->common) / 4; i++) {
869             fprintf(fp, "reg[%d] 0x%08x\n", i + 8, tmp[i]);
870         }
871         fprintf(fp, "\n");
872         tmp = (RK_U32 *)&hw_regs->vp9d_param;
873         for (i = 0; i < sizeof(hw_regs->vp9d_param) / 4; i++) {
874             fprintf(fp, "reg[%d] 0x%08x\n", i + 64, tmp[i]);
875         }
876         fprintf(fp, "\n");
877         tmp = (RK_U32 *)&hw_regs->common_addr;
878         for (i = 0; i < sizeof(hw_regs->common_addr) / 4; i++) {
879             fprintf(fp, "reg[%d] 0x%08x\n", i + 128, tmp[i]);
880         }
881         fprintf(fp, "\n");
882         tmp = (RK_U32 *)&hw_regs->vp9d_addr;
883         for (i = 0; i < sizeof(hw_regs->vp9d_addr) / 4; i++) {
884             fprintf(fp, "reg[%d] 0x%08x\n", i + 160, tmp[i]);
885         }
886         file_cnt++;
887         fflush(fp);
888         fclose(fp);
889     }
890 #endif
891 
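    /*
     * Write the register set to the driver in segments (common, codec params,
     * common addr, codec addr, statistic), register the interrupt status
     * registers for read-back, then send the command to kick off the hardware.
     */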
892     do {
893         MppDevRegWrCfg wr_cfg;
894         MppDevRegRdCfg rd_cfg;
895 
896         wr_cfg.reg = &hw_regs->common;
897         wr_cfg.size = sizeof(hw_regs->common);
898         wr_cfg.offset = OFFSET_COMMON_REGS;
899 
900         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
901         if (ret) {
902             mpp_err_f("set register write failed %d\n", ret);
903             break;
904         }
905 
906         wr_cfg.reg = &hw_regs->vp9d_param;
907         wr_cfg.size = sizeof(hw_regs->vp9d_param);
908         wr_cfg.offset = OFFSET_CODEC_PARAMS_REGS;
909 
910         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
911         if (ret) {
912             mpp_err_f("set register write failed %d\n", ret);
913             break;
914         }
915 
916         wr_cfg.reg = &hw_regs->common_addr;
917         wr_cfg.size = sizeof(hw_regs->common_addr);
918         wr_cfg.offset = OFFSET_COMMON_ADDR_REGS;
919 
920         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
921         if (ret) {
922             mpp_err_f("set register write failed %d\n", ret);
923             break;
924         }
925 
926         wr_cfg.reg = &hw_regs->vp9d_addr;
927         wr_cfg.size = sizeof(hw_regs->vp9d_addr);
928         wr_cfg.offset = OFFSET_CODEC_ADDR_REGS;
929 
930         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
931         if (ret) {
932             mpp_err_f("set register write failed %d\n", ret);
933             break;
934         }
935 
936         wr_cfg.reg = &hw_regs->statistic;
937         wr_cfg.size = sizeof(hw_regs->statistic);
938         wr_cfg.offset = OFFSET_STATISTIC_REGS;
939 
940         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
941         if (ret) {
942             mpp_err_f("set register write failed %d\n", ret);
943             break;
944         }
945 
946         rd_cfg.reg = &hw_regs->irq_status;
947         rd_cfg.size = sizeof(hw_regs->irq_status);
948         rd_cfg.offset = OFFSET_INTERRUPT_REGS;
949 
950         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_RD, &rd_cfg);
951         if (ret) {
952             mpp_err_f("set register read failed %d\n", ret);
953             break;
954         }
955 
956         /* rcb info for sram */
957         vdpu34x_set_rcbinfo(dev, hw_ctx->rcb_info);
958 
959         ret = mpp_dev_ioctl(dev, MPP_DEV_CMD_SEND, NULL);
960         if (ret) {
961             mpp_err_f("send cmd failed %d\n", ret);
962             break;
963         }
964     } while (0);
965 
966     (void)task;
967     return ret;
968 }
969 
970 static MPP_RET hal_vp9d_vdpu34x_wait(void *hal, HalTaskInfo *task)
971 {
972     MPP_RET ret = MPP_OK;
973     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
974     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
975     Vdpu34xVp9dRegSet *hw_regs = (Vdpu34xVp9dRegSet *)hw_ctx->hw_regs;
976 
977     if (p_hal->fast_mode)
978         hw_regs = (Vdpu34xVp9dRegSet *)hw_ctx->g_buf[task->dec.reg_index].hw_regs;
979 
980     mpp_assert(hw_regs);
981 
982     ret = mpp_dev_ioctl(p_hal->dev, MPP_DEV_CMD_POLL, NULL);
983     if (ret)
984         mpp_err_f("poll cmd failed %d\n", ret);
985 
986     if (hal_vp9d_debug & HAL_VP9D_DBG_REG) {
987         RK_U32 *p = (RK_U32 *)hw_regs;
988         RK_U32 i = 0;
989 
990         for (i = 0; i < sizeof(Vdpu34xVp9dRegSet) / 4; i++)
991             mpp_log("get regs[%02d]: %08X\n", i, *p++);
992     }
993 
994     if (task->dec.flags.parse_err ||
995         task->dec.flags.ref_err ||
996         !hw_regs->irq_status.reg224.dec_rdy_sta) {
997         MppFrame mframe = NULL;
998         mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
999         mpp_frame_set_errinfo(mframe, 1);
1000     }
1001 #if !HW_PROB
1002     if (p_hal->dec_cb && task->dec.flags.wait_done) {
1003         DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)task->dec.syntax.data;
1004         hal_vp9d_update_counts(mpp_buffer_get_ptr(hw_ctx->count_base), task->dec.syntax.data);
1005         mpp_callback(p_hal->dec_cb, &pic_param->counts);
1006     }
1007 #endif
1008     if (p_hal->fast_mode) {
1009         hw_ctx->g_buf[task->dec.reg_index].use_flag = 0;
1010     }
1011 
1012     (void)task;
1013     return ret;
1014 }
1015 
1016 static MPP_RET hal_vp9d_vdpu34x_reset(void *hal)
1017 {
1018     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1019     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
1020 
1021     hal_vp9d_enter();
1022 
1023     memset(&hw_ctx->ls_info, 0, sizeof(hw_ctx->ls_info));
1024     hw_ctx->mv_base_addr = -1;
1025     hw_ctx->pre_mv_base_addr = -1;
1026     hw_ctx->last_segid_flag = 1;
1027     memset(&hw_ctx->prob_ref_poc, 0, sizeof(hw_ctx->prob_ref_poc));
1028     hw_ctx->col_ref_poc = 0;
1029     hw_ctx->segid_ref_poc = 0;
1030 
1031     hal_vp9d_leave();
1032 
1033     return MPP_OK;
1034 }
1035 
1036 static MPP_RET hal_vp9d_vdpu34x_flush(void *hal)
1037 {
1038     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1039     Vdpu34xVp9dCtx *hw_ctx = (Vdpu34xVp9dCtx*)p_hal->hw_ctx;
1040 
1041     hal_vp9d_enter();
1042 
1043     hw_ctx->mv_base_addr = -1;
1044     hw_ctx->pre_mv_base_addr = -1;
1045 
1046     hal_vp9d_leave();
1047 
1048     return MPP_OK;
1049 }
1050 
1051 static MPP_RET hal_vp9d_vdpu34x_control(void *hal, MpiCmd cmd_type, void *param)
1052 {
1053     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1054 
1055     switch ((MpiCmd)cmd_type) {
1056     case MPP_DEC_SET_FRAME_INFO : {
1057         /* commit buffer stride */
1058         RK_U32 width = mpp_frame_get_width((MppFrame)param);
1059         RK_U32 height = mpp_frame_get_height((MppFrame)param);
1060         MppFrameFormat fmt = mpp_frame_get_fmt((MppFrame)param);
1061 
1062         if (MPP_FRAME_FMT_IS_FBC(fmt)) {
1063             vdpu34x_afbc_align_calc(p_hal->slots, (MppFrame)param, 0);
1064         } else {
1065             mpp_frame_set_hor_stride((MppFrame)param, vp9_hor_align(width));
1066             mpp_frame_set_ver_stride((MppFrame)param, vp9_ver_align(height));
1067         }
1068     } break;
1069     default : {
1070     } break;
1071     }
1072 
1073     return MPP_OK;
1074 }
1075 
1076 const MppHalApi hal_vp9d_vdpu34x = {
1077     .name = "vp9d_vdpu34x",
1078     .type = MPP_CTX_DEC,
1079     .coding = MPP_VIDEO_CodingVP9,
1080     .ctx_size = sizeof(Vdpu34xVp9dCtx),
1081     .flag = 0,
1082     .init = hal_vp9d_vdpu34x_init,
1083     .deinit = hal_vp9d_vdpu34x_deinit,
1084     .reg_gen = hal_vp9d_vdpu34x_gen_regs,
1085     .start = hal_vp9d_vdpu34x_start,
1086     .wait = hal_vp9d_vdpu34x_wait,
1087     .reset = hal_vp9d_vdpu34x_reset,
1088     .flush = hal_vp9d_vdpu34x_flush,
1089     .control = hal_vp9d_vdpu34x_control,
1090 };
1091