xref: /OK3568_Linux_fs/external/mpp/mpp/hal/rkdec/vp9d/hal_vp9d_rkv.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1 /*
2  * Copyright 2020 Rockchip Electronics Co. LTD
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define MODULE_TAG "hal_vp9d_rkv"
18 
19 #include <stdio.h>
20 #include <string.h>
21 
22 #include "mpp_env.h"
23 #include "mpp_mem.h"
24 #include "mpp_common.h"
25 
26 #include "hal_vp9d_debug.h"
27 #include "hal_vp9d_ctx.h"
28 #include "hal_vp9d_com.h"
29 #include "hal_vp9d_rkv.h"
30 #include "hal_vp9d_rkv_reg.h"
31 #include "vp9d_syntax.h"
32 
33 
/* Per-instance hardware context for the rkdec VP9 decoder hal. */
typedef struct Vp9dRkvCtx_t {
    Vp9dRegBuf      g_buf[MAX_GEN_REG];  /* per-task register/buffer sets used in fast (async) mode */
    MppBuffer       probe_base;          /* probability tables written by hal_vp9d_output_probe() */
    MppBuffer       count_base;          /* symbol counts written by hw, read by hal_vp9d_update_counts() */
    MppBuffer       segid_cur_base;      /* segment id map of the current frame */
    MppBuffer       segid_last_base;     /* segment id map of the previous frame */
    void*           hw_regs;             /* staging copy of VP9_REGS sent to the device */
    RK_S32          mv_base_addr;        /* fd of current frame's output buffer holding mv data, -1 = none */
    RK_U32          mv_base_offset;      /* byte offset of mv data inside that buffer */
    RK_S32          pre_mv_base_addr;    /* fd of previous frame's mv buffer, -1 = none */
    RK_U32          pre_mv_base_offset;  /* byte offset of previous frame's mv data */
    Vp9dLastInfo    ls_info;             /* syntax state carried over from the last decoded frame */
    /*
     * swap between segid_cur_base & segid_last_base
     * 0  used segid_cur_base as last
     * 1  used segid_last_base as last
     * (toggled in gen_regs when the segmentation map is updated)
     */
    RK_U32          last_segid_flag;
} Vp9dRkvCtx;
53 
hal_vp9d_alloc_res(HalVp9dCtx * hal)54 static MPP_RET hal_vp9d_alloc_res(HalVp9dCtx *hal)
55 {
56     RK_S32 i = 0;
57     RK_S32 ret = 0;
58     HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
59     Vp9dRkvCtx *hw_ctx = (Vp9dRkvCtx*)p_hal->hw_ctx;
60 
61     if (p_hal->fast_mode) {
62         for (i = 0; i < MAX_GEN_REG; i++) {
63             hw_ctx->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(VP9_REGS));
64             ret = mpp_buffer_get(p_hal->group,
65                                  &hw_ctx->g_buf[i].probe_base, PROB_SIZE);
66             if (ret) {
67                 mpp_err("vp9 probe_base get buffer failed\n");
68                 return ret;
69             }
70             ret = mpp_buffer_get(p_hal->group,
71                                  &hw_ctx->g_buf[i].count_base, COUNT_SIZE);
72             if (ret) {
73                 mpp_err("vp9 count_base get buffer failed\n");
74                 return ret;
75             }
76             ret = mpp_buffer_get(p_hal->group,
77                                  &hw_ctx->g_buf[i].segid_cur_base, MAX_SEGMAP_SIZE);
78             if (ret) {
79                 mpp_err("vp9 segid_cur_base get buffer failed\n");
80                 return ret;
81             }
82             ret = mpp_buffer_get(p_hal->group,
83                                  &hw_ctx->g_buf[i].segid_last_base, MAX_SEGMAP_SIZE);
84             if (ret) {
85                 mpp_err("vp9 segid_last_base get buffer failed\n");
86                 return ret;
87             }
88         }
89     } else {
90         hw_ctx->hw_regs = mpp_calloc_size(void, sizeof(VP9_REGS));
91         ret = mpp_buffer_get(p_hal->group, &hw_ctx->probe_base, PROB_SIZE);
92         if (ret) {
93             mpp_err("vp9 probe_base get buffer failed\n");
94             return ret;
95         }
96         ret = mpp_buffer_get(p_hal->group, &hw_ctx->count_base, COUNT_SIZE);
97         if (ret) {
98             mpp_err("vp9 count_base get buffer failed\n");
99             return ret;
100         }
101         ret = mpp_buffer_get(p_hal->group, &hw_ctx->segid_cur_base, MAX_SEGMAP_SIZE);
102         if (ret) {
103             mpp_err("vp9 segid_cur_base get buffer failed\n");
104             return ret;
105         }
106         ret = mpp_buffer_get(p_hal->group, &hw_ctx->segid_last_base, MAX_SEGMAP_SIZE);
107         if (ret) {
108             mpp_err("vp9 segid_last_base get buffer failed\n");
109             return ret;
110         }
111     }
112     return MPP_OK;
113 }
114 
hal_vp9d_release_res(HalVp9dCtx * hal)115 static MPP_RET hal_vp9d_release_res(HalVp9dCtx *hal)
116 {
117     RK_S32 i = 0;
118     RK_S32 ret = 0;
119     HalVp9dCtx *p_hal = hal;
120     Vp9dRkvCtx *hw_ctx = (Vp9dRkvCtx*)p_hal->hw_ctx;
121 
122     if (p_hal->fast_mode) {
123         for (i = 0; i < MAX_GEN_REG; i++) {
124             if (hw_ctx->g_buf[i].probe_base) {
125                 ret = mpp_buffer_put(hw_ctx->g_buf[i].probe_base);
126                 if (ret) {
127                     mpp_err("vp9 probe_base put buffer failed\n");
128                     return ret;
129                 }
130             }
131             if (hw_ctx->g_buf[i].count_base) {
132                 ret = mpp_buffer_put(hw_ctx->g_buf[i].count_base);
133                 if (ret) {
134                     mpp_err("vp9 count_base put buffer failed\n");
135                     return ret;
136                 }
137             }
138             if (hw_ctx->g_buf[i].segid_cur_base) {
139                 ret = mpp_buffer_put(hw_ctx->g_buf[i].segid_cur_base);
140                 if (ret) {
141                     mpp_err("vp9 segid_cur_base put buffer failed\n");
142                     return ret;
143                 }
144             }
145             if (hw_ctx->g_buf[i].segid_last_base) {
146                 ret = mpp_buffer_put(hw_ctx->g_buf[i].segid_last_base);
147                 if (ret) {
148                     mpp_err("vp9 segid_last_base put buffer failed\n");
149                     return ret;
150                 }
151             }
152             if (hw_ctx->g_buf[i].hw_regs) {
153                 mpp_free(hw_ctx->g_buf[i].hw_regs);
154                 hw_ctx->g_buf[i].hw_regs = NULL;
155             }
156         }
157     } else {
158         if (hw_ctx->probe_base) {
159             ret = mpp_buffer_put(hw_ctx->probe_base);
160             if (ret) {
161                 mpp_err("vp9 probe_base get buffer failed\n");
162                 return ret;
163             }
164         }
165         if (hw_ctx->count_base) {
166             ret = mpp_buffer_put(hw_ctx->count_base);
167             if (ret) {
168                 mpp_err("vp9 count_base put buffer failed\n");
169                 return ret;
170             }
171         }
172         if (hw_ctx->segid_cur_base) {
173             ret = mpp_buffer_put(hw_ctx->segid_cur_base);
174             if (ret) {
175                 mpp_err("vp9 segid_cur_base put buffer failed\n");
176                 return ret;
177             }
178         }
179         if (hw_ctx->segid_last_base) {
180             ret = mpp_buffer_put(hw_ctx->segid_last_base);
181             if (ret) {
182                 mpp_err("vp9 segid_last_base put buffer failed\n");
183                 return ret;
184             }
185         }
186         if (hw_ctx->hw_regs) {
187             mpp_free(hw_ctx->hw_regs);
188             hw_ctx->hw_regs = NULL;
189         }
190     }
191     return MPP_OK;
192 }
193 
/*
 * Initialize the rkdec VP9 hal: allocate the hardware context, install the
 * VP9 stride-alignment hooks on the frame slot pool, ensure an internal ION
 * buffer group exists and allocate the per-frame working buffers.
 *
 * NOTE(review): on the mpp_buffer_group_get_internal() / hal_vp9d_alloc_res()
 * error paths p_hal->hw_ctx (and any partially acquired buffers) are not
 * released here -- confirm the caller invokes hal_vp9d_rkv_deinit() after a
 * failed init, otherwise these paths leak.
 */
MPP_RET hal_vp9d_rkv_init(void *hal, MppHalCfg *cfg)
{
    MPP_RET ret = MPP_OK;
    HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
    /* MEM_CHECK jumps to __FAILED when the allocation returns NULL */
    MEM_CHECK(ret, p_hal->hw_ctx = mpp_calloc_size(void, sizeof(Vp9dRkvCtx)));
    Vp9dRkvCtx *ctx = (Vp9dRkvCtx *)p_hal->hw_ctx;

    mpp_log("hal_vp9d_rkv_init in");
    /* -1 marks "no motion vector buffer yet" for the very first frame */
    ctx->mv_base_addr = -1;
    ctx->pre_mv_base_addr = -1;
    mpp_slots_set_prop(p_hal->slots, SLOTS_HOR_ALIGN, vp9_hor_align);
    mpp_slots_set_prop(p_hal->slots, SLOTS_VER_ALIGN, vp9_ver_align);

    if (p_hal->group == NULL) {
        ret = mpp_buffer_group_get_internal(&p_hal->group, MPP_BUFFER_TYPE_ION);
        if (ret) {
            mpp_err("vp9 mpp_buffer_group_get failed\n");
            return ret;
        }
    }

    ret = hal_vp9d_alloc_res(p_hal);
    if (ret) {
        mpp_err("hal_vp9d_alloc_res failed\n");
        return ret;
    }

    /* start the segment-map ping-pong with segid_last_base as "last" */
    ctx->last_segid_flag = 1;

    (void) cfg;
    return ret = MPP_OK;
__FAILED:
    return ret = MPP_NOK;
}
228 
hal_vp9d_rkv_deinit(void * hal)229 MPP_RET hal_vp9d_rkv_deinit(void *hal)
230 {
231     MPP_RET ret = MPP_OK;
232     HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
233 
234     hal_vp9d_release_res(p_hal);
235 
236     if (p_hal->group) {
237         ret = mpp_buffer_group_put(p_hal->group);
238         if (ret) {
239             mpp_err("vp9d group free buffer failed\n");
240             return ret;
241         }
242     }
243     MPP_FREE(p_hal->hw_ctx);
244     return ret = MPP_OK;
245 }
246 
/*
 * Build the VP9_REGS register file for one decode task.
 *
 * Reads the DXVA-style picture parameters attached to the task, programs
 * stream / stride / reference / segmentation registers, and finally
 * snapshots the current frame's syntax state into hw_ctx->ls_info for the
 * next frame. Returns MPP_OK on success, MPP_ERR_NOMEM when fast mode has
 * no free register set.
 */
MPP_RET hal_vp9d_rkv_gen_regs(void *hal, HalTaskInfo *task)
{
    RK_S32   i;
    RK_U8    bit_depth = 0;
    RK_U32   pic_h[3] = { 0 };
    RK_U32   ref_frame_width_y;
    RK_U32   ref_frame_height_y;
    RK_S32   stream_len = 0, aglin_offset = 0;
    RK_U32   y_hor_virstride, uv_hor_virstride, y_virstride, uv_virstride, yuv_virstride;
    RK_U8   *bitstream = NULL;
    MppBuffer streambuf = NULL;
    RK_U32 sw_y_hor_virstride;
    RK_U32 sw_uv_hor_virstride;
    RK_U32 sw_y_virstride;
    RK_U32 sw_uv_virstride;
    RK_U32 sw_yuv_virstride ;
    RK_U8  ref_idx = 0;
    RK_U32 *reg_ref_base = 0;
    RK_S32 intraFlag = 0;
    MppBuffer framebuf = NULL;
    HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
    Vp9dRkvCtx *hw_ctx = (Vp9dRkvCtx*)p_hal->hw_ctx;
    DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)task->dec.syntax.data;

    /* fast mode: claim a free register/buffer set and alias it through
     * hw_ctx so the rest of this function is mode-agnostic */
    if (p_hal->fast_mode) {
        for (i = 0; i < MAX_GEN_REG; i++) {
            if (!hw_ctx->g_buf[i].use_flag) {
                task->dec.reg_index = i;
                hw_ctx->probe_base = hw_ctx->g_buf[i].probe_base;
                hw_ctx->count_base = hw_ctx->g_buf[i].count_base;
                hw_ctx->segid_cur_base = hw_ctx->g_buf[i].segid_cur_base;
                hw_ctx->segid_last_base = hw_ctx->g_buf[i].segid_last_base;
                hw_ctx->hw_regs = hw_ctx->g_buf[i].hw_regs;
                hw_ctx->g_buf[i].use_flag = 1;
                break;
            }
        }
        if (i == MAX_GEN_REG) {
            mpp_err("vp9 fast mode buf all used\n");
            return MPP_ERR_NOMEM;
        }
    }
    VP9_REGS *vp9_hw_regs = (VP9_REGS*)hw_ctx->hw_regs;
    /* key frame (frame_type == 0) or intra-only frame */
    intraFlag = (!pic_param->frame_type || pic_param->intra_only);
    /* serialize the probability tables into the probe buffer for the hw */
    hal_vp9d_output_probe(mpp_buffer_get_ptr(hw_ctx->probe_base), task->dec.syntax.data);
    stream_len = (RK_S32)mpp_packet_get_length(task->dec.input_packet);
    memset(hw_ctx->hw_regs, 0, sizeof(VP9_REGS));
    vp9_hw_regs->swreg2_sysctrl.sw_dec_mode = 2; //set as vp9 dec
    /* round the stream length up to 16 bytes and add 0x80 guard bytes */
    vp9_hw_regs->swreg5_stream_len = ((stream_len + 15) & (~15)) + 0x80;

    mpp_buf_slot_get_prop(p_hal->packet_slots, task->dec.input, SLOT_BUFFER, &streambuf);
    bitstream = mpp_buffer_get_ptr(streambuf);
    /* zero the padding between the real stream end and the aligned length */
    aglin_offset = vp9_hw_regs->swreg5_stream_len - stream_len;
    if (aglin_offset > 0) {
        memset((void *)(bitstream + stream_len), 0, aglin_offset);
    }

    //--- calculate the yuv_frame_size and mv_size
    bit_depth = pic_param->BitDepthMinus8Luma + 8;
    pic_h[0] = vp9_ver_align(pic_param->height); //p_cm->height;
    pic_h[1] = vp9_ver_align(pic_param->height) / 2; //(p_cm->height + 1) / 2;
    pic_h[2] = pic_h[1];

    /* horizontal strides are programmed in units of 16 bytes (>> 4) */
    sw_y_hor_virstride = (vp9_hor_align((pic_param->width * bit_depth) >> 3) >> 4);
    sw_uv_hor_virstride = (vp9_hor_align((pic_param->width * bit_depth) >> 3) >> 4);
    sw_y_virstride = pic_h[0] * sw_y_hor_virstride;

    sw_uv_virstride = pic_h[1] * sw_uv_hor_virstride;
    sw_yuv_virstride = sw_y_virstride + sw_uv_virstride;

    vp9_hw_regs->swreg3_picpar.sw_y_hor_virstride = sw_y_hor_virstride;
    vp9_hw_regs->swreg3_picpar.sw_uv_hor_virstride = sw_uv_hor_virstride;
    vp9_hw_regs->swreg8_y_virstride.sw_y_virstride = sw_y_virstride;
    vp9_hw_regs->swreg9_yuv_virstride.sw_yuv_virstride = sw_yuv_virstride;

    /* inter frame that may reuse colocated motion vectors: remember where
     * the previous frame's mv data lives before overwriting mv_base below */
    if (!pic_param->intra_only && pic_param->frame_type &&
        !pic_param->error_resilient_mode && hw_ctx->ls_info.last_show_frame) {
        hw_ctx->pre_mv_base_addr = hw_ctx->mv_base_addr;
        hw_ctx->pre_mv_base_offset = hw_ctx->mv_base_offset;
    }


    mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_BUFFER, &framebuf);
    vp9_hw_regs->swreg7_decout_base =  mpp_buffer_get_fd(framebuf);
    vp9_hw_regs->swreg4_strm_rlc_base = mpp_buffer_get_fd(streambuf);

    vp9_hw_regs->swreg6_cabactbl_prob_base = mpp_buffer_get_fd(hw_ctx->probe_base);
    vp9_hw_regs->swreg14_vp9_count_base  = mpp_buffer_get_fd(hw_ctx->count_base);

    /* ping-pong the two segment id maps; last_segid_flag says which buffer
     * currently holds the "last frame" map */
    if (hw_ctx->last_segid_flag) {
        vp9_hw_regs->swreg15_vp9_segidlast_base = mpp_buffer_get_fd(hw_ctx->segid_last_base);
        vp9_hw_regs->swreg16_vp9_segidcur_base = mpp_buffer_get_fd(hw_ctx->segid_cur_base);
    } else {
        vp9_hw_regs->swreg15_vp9_segidlast_base = mpp_buffer_get_fd(hw_ctx->segid_cur_base);
        vp9_hw_regs->swreg16_vp9_segidcur_base = mpp_buffer_get_fd(hw_ctx->segid_last_base);
    }

    /* the hw writes a fresh map only when segmentation updates it; swap
     * roles for the next frame in that case */
    if (pic_param->stVP9Segments.enabled && pic_param->stVP9Segments.update_map) {
        hw_ctx->last_segid_flag = !hw_ctx->last_segid_flag;
    }

    /* mv data is appended after the yuv planes inside the output buffer;
     * offset unit depends on the kernel ioctl version (<< 4 on new kernels) */
    hw_ctx->mv_base_addr = vp9_hw_regs->swreg7_decout_base;
    hw_ctx->mv_base_offset = mpp_get_ioctl_version() ? sw_yuv_virstride << 4 : sw_yuv_virstride;
    if (hw_ctx->pre_mv_base_addr < 0) {
        /* first frame: point the "previous" mv buffer at ourselves */
        hw_ctx->pre_mv_base_addr = hw_ctx->mv_base_addr;
        hw_ctx->pre_mv_base_offset = hw_ctx->mv_base_offset;
    }
    vp9_hw_regs->swreg52_vp9_refcolmv_base = hw_ctx->pre_mv_base_addr;
    mpp_dev_set_reg_offset(p_hal->dev, 52, hw_ctx->pre_mv_base_offset);

    vp9_hw_regs->swreg10_vp9_cprheader_offset.sw_vp9_cprheader_offset = 0; //no use now.
    /* regs 11/12/13 are consecutive: last / golden / altref base addresses */
    reg_ref_base = &vp9_hw_regs->swreg11_vp9_referlast_base;
    for (i = 0; i < 3; i++) {
        ref_idx = pic_param->frame_refs[i].Index7Bits;
        ref_frame_width_y = pic_param->ref_frame_coded_width[ref_idx];
        ref_frame_height_y = pic_param->ref_frame_coded_height[ref_idx];
        pic_h[0] = vp9_ver_align(ref_frame_height_y);
        pic_h[1] = vp9_ver_align(ref_frame_height_y) / 2;
        y_hor_virstride = (vp9_hor_align((ref_frame_width_y * bit_depth) >> 3) >> 4);
        uv_hor_virstride = (vp9_hor_align((ref_frame_width_y * bit_depth) >> 3) >> 4);
        y_virstride = y_hor_virstride * pic_h[0];
        uv_virstride = uv_hor_virstride * pic_h[1];
        yuv_virstride = y_virstride + uv_virstride;

        /* 0x7f marks an invalid/unused reference slot */
        if (pic_param->ref_frame_map[ref_idx].Index7Bits < 0x7f) {
            mpp_buf_slot_get_prop(p_hal->slots, pic_param->ref_frame_map[ref_idx].Index7Bits, SLOT_BUFFER, &framebuf);
        }

        /* NOTE(review): duplicated condition -- could be merged with the
         * lookup above; kept as-is to preserve exact behavior */
        if (pic_param->ref_frame_map[ref_idx].Index7Bits < 0x7f) {
            switch (i) {
            case 0: {

                vp9_hw_regs->swreg17_vp9_frame_size_last.sw_framewidth_last = ref_frame_width_y;
                vp9_hw_regs->swreg17_vp9_frame_size_last.sw_frameheight_last = ref_frame_height_y;
                vp9_hw_regs->swreg37_vp9_lastf_hor_virstride.sw_vp9_lastfy_hor_virstride = y_hor_virstride;
                vp9_hw_regs->swreg37_vp9_lastf_hor_virstride.sw_vp9_lastfuv_hor_virstride = uv_hor_virstride;
                vp9_hw_regs->swreg48_vp9_last_ystride.sw_vp9_lastfy_virstride = y_virstride;
                vp9_hw_regs->swreg51_vp9_lastref_yuvstride.sw_vp9_lastref_yuv_virstride = yuv_virstride;
                break;
            }
            case 1: {
                vp9_hw_regs->swreg18_vp9_frame_size_golden.sw_framewidth_golden = ref_frame_width_y;
                vp9_hw_regs->swreg18_vp9_frame_size_golden.sw_frameheight_golden = ref_frame_height_y;
                vp9_hw_regs->swreg38_vp9_goldenf_hor_virstride.sw_vp9_goldenfy_hor_virstride = y_hor_virstride;
                vp9_hw_regs->swreg38_vp9_goldenf_hor_virstride.sw_vp9_goldenuv_hor_virstride = uv_hor_virstride;
                vp9_hw_regs->swreg49_vp9_golden_ystride.sw_vp9_goldeny_virstride = y_virstride;
                break;
            }
            case 2: {
                vp9_hw_regs->swreg19_vp9_frame_size_altref.sw_framewidth_alfter = ref_frame_width_y;
                vp9_hw_regs->swreg19_vp9_frame_size_altref.sw_frameheight_alfter = ref_frame_height_y;
                vp9_hw_regs->swreg39_vp9_altreff_hor_virstride.sw_vp9_altreffy_hor_virstride = y_hor_virstride;
                vp9_hw_regs->swreg39_vp9_altreff_hor_virstride.sw_vp9_altreffuv_hor_virstride = uv_hor_virstride;
                vp9_hw_regs->swreg50_vp9_altrefy_ystride.sw_vp9_altrefy_virstride = y_virstride;
                break;
            }
            default:
                break;

            }

            /*0 map to 11*/
            /*1 map to 12*/
            /*2 map to 13*/
            if (framebuf != NULL) {
                reg_ref_base[i] = mpp_buffer_get_fd(framebuf);
            } else {
                mpp_log("ref buff address is no valid used out as base slot index 0x%x", pic_param->ref_frame_map[ref_idx].Index7Bits);
                reg_ref_base[i] = vp9_hw_regs->swreg7_decout_base; //set
            }
        } else {
            /* invalid reference: fall back to the current output buffer */
            reg_ref_base[i] = vp9_hw_regs->swreg7_decout_base; //set
        }
    }

    /* per-segment features from the LAST frame: bit n of feature_mask
     * enables feature n (qp delta / loopfilter / reference / skip) */
    for (i = 0; i < 8; i++) {
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_frame_qp_delta_en              = (hw_ctx->ls_info.feature_mask[i]) & 0x1;
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_frame_qp_delta                 = hw_ctx->ls_info.feature_data[i][0];
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_frame_loopfitler_value_en      = (hw_ctx->ls_info.feature_mask[i] >> 1) & 0x1;
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_frame_loopfilter_value         = hw_ctx->ls_info.feature_data[i][1];
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_referinfo_en                   = (hw_ctx->ls_info.feature_mask[i] >> 2) & 0x1;
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_referinfo                      = hw_ctx->ls_info.feature_data[i][2];
        vp9_hw_regs->swreg20_27_vp9_segid_grp[i].sw_vp9segid_frame_skip_en                  = (hw_ctx->ls_info.feature_mask[i] >> 3) & 0x1;
    }


    vp9_hw_regs->swreg20_27_vp9_segid_grp[0].sw_vp9segid_abs_delta                              = hw_ctx->ls_info.abs_delta_last;

    vp9_hw_regs->swreg28_vp9_cprheader_config.sw_vp9_tx_mode                                    = pic_param->txmode;

    vp9_hw_regs->swreg28_vp9_cprheader_config.sw_vp9_frame_reference_mode                   = pic_param->refmode;

    vp9_hw_regs->swreg32_vp9_ref_deltas_lastframe.sw_vp9_ref_deltas_lastframe               = 0;

    if (!intraFlag) {
        /* pack four 7-bit ref deltas / two 7-bit mode deltas from last frame */
        for (i = 0; i < 4; i++)
            vp9_hw_regs->swreg32_vp9_ref_deltas_lastframe.sw_vp9_ref_deltas_lastframe           |= (hw_ctx->ls_info.last_ref_deltas[i] & 0x7f) << (7 * i);

        for (i = 0; i < 2; i++)
            vp9_hw_regs->swreg33_vp9_info_lastframe.sw_vp9_mode_deltas_lastframe                |= (hw_ctx->ls_info.last_mode_deltas[i] & 0x7f) << (7 * i);


    } else {
        hw_ctx->ls_info.segmentation_enable_flag_last = 0;
        hw_ctx->ls_info.last_intra_only = 1;
    }

    /* NOTE(review): this unconditionally clears the mode deltas OR-ed in
     * just above for the inter case -- looks suspicious; confirm against
     * the hardware register expectation before changing */
    vp9_hw_regs->swreg33_vp9_info_lastframe.sw_vp9_mode_deltas_lastframe                        = 0;

    vp9_hw_regs->swreg33_vp9_info_lastframe.sw_segmentation_enable_lstframe                  = hw_ctx->ls_info.segmentation_enable_flag_last;
    vp9_hw_regs->swreg33_vp9_info_lastframe.sw_vp9_last_show_frame                          = hw_ctx->ls_info.last_show_frame;
    vp9_hw_regs->swreg33_vp9_info_lastframe.sw_vp9_last_intra_only                          = hw_ctx->ls_info.last_intra_only;
    vp9_hw_regs->swreg33_vp9_info_lastframe.sw_vp9_last_widthheight_eqcur                   = (pic_param->width == hw_ctx->ls_info.last_width) && (pic_param->height == hw_ctx->ls_info.last_height);

    vp9_hw_regs->swreg36_vp9_lasttile_size.sw_vp9_lasttile_size                             =  stream_len - pic_param->first_partition_size;


    if (!intraFlag) {
        /* motion vector scaling factors for the three references */
        vp9_hw_regs->swreg29_vp9_lref_scale.sw_vp9_lref_hor_scale = pic_param->mvscale[0][0];
        vp9_hw_regs->swreg29_vp9_lref_scale.sw_vp9_lref_ver_scale = pic_param->mvscale[0][1];
        vp9_hw_regs->swreg30_vp9_gref_scale.sw_vp9_gref_hor_scale = pic_param->mvscale[1][0];
        vp9_hw_regs->swreg30_vp9_gref_scale.sw_vp9_gref_ver_scale = pic_param->mvscale[1][1];
        vp9_hw_regs->swreg31_vp9_aref_scale.sw_vp9_aref_hor_scale = pic_param->mvscale[2][0];
        vp9_hw_regs->swreg31_vp9_aref_scale.sw_vp9_aref_ver_scale = pic_param->mvscale[2][1];
        // vp9_hw_regs.swreg33_vp9_info_lastframe.sw_vp9_color_space_lastkeyframe = p_cm->color_space_last;
    }


    //reuse reg64, and it will be written by hardware to show performance.
    vp9_hw_regs->swreg64_performance_cycle.sw_performance_cycle = 0;
    vp9_hw_regs->swreg64_performance_cycle.sw_performance_cycle |= pic_param->width;
    vp9_hw_regs->swreg64_performance_cycle.sw_performance_cycle |= pic_param->height << 16;

    /* arm the decoder: enable decode and the hardware timeout interrupt */
    vp9_hw_regs->swreg1_int.sw_dec_e         = 1;
    vp9_hw_regs->swreg1_int.sw_dec_timeout_e = 1;

    //last info  update
    hw_ctx->ls_info.abs_delta_last = pic_param->stVP9Segments.abs_delta;
    for (i = 0 ; i < 4; i ++) {
        hw_ctx->ls_info.last_ref_deltas[i] = pic_param->ref_deltas[i];
    }

    for (i = 0 ; i < 2; i ++) {
        hw_ctx->ls_info.last_mode_deltas[i] = pic_param->mode_deltas[i];
    }

    for (i = 0; i < 8; i++) {
        hw_ctx->ls_info.feature_data[i][0] = pic_param->stVP9Segments.feature_data[i][0];
        hw_ctx->ls_info.feature_data[i][1] = pic_param->stVP9Segments.feature_data[i][1];
        hw_ctx->ls_info.feature_data[i][2] = pic_param->stVP9Segments.feature_data[i][2];
        hw_ctx->ls_info.feature_data[i][3] = pic_param->stVP9Segments.feature_data[i][3];
        hw_ctx->ls_info.feature_mask[i]  = pic_param->stVP9Segments.feature_mask[i];
    }
    /* sticky: once segmentation was enabled it stays flagged as "last" */
    if (!hw_ctx->ls_info.segmentation_enable_flag_last)
        hw_ctx->ls_info.segmentation_enable_flag_last = pic_param->stVP9Segments.enabled;

    hw_ctx->ls_info.last_show_frame = pic_param->show_frame;
    hw_ctx->ls_info.last_width = pic_param->width;
    hw_ctx->ls_info.last_height = pic_param->height;
    hw_ctx->ls_info.last_intra_only = (!pic_param->frame_type || pic_param->intra_only);
    hal_vp9d_dbg_par("stVP9Segments.enabled %d show_frame %d  width %d  height %d last_intra_only %d",
                     pic_param->stVP9Segments.enabled, pic_param->show_frame,
                     pic_param->width, pic_param->height,
                     hw_ctx->ls_info.last_intra_only);

    // whether need update counts
    if (pic_param->refresh_frame_context && !pic_param->parallelmode) {
        task->dec.flags.wait_done = 1;
    }

    return MPP_OK;
}
519 
hal_vp9d_rkv_start(void * hal,HalTaskInfo * task)520 MPP_RET hal_vp9d_rkv_start(void *hal, HalTaskInfo *task)
521 {
522     MPP_RET ret = MPP_OK;
523     HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
524     Vp9dRkvCtx *hw_ctx = (Vp9dRkvCtx*)p_hal->hw_ctx;
525     VP9_REGS *hw_regs = (VP9_REGS *)hw_ctx->hw_regs;
526     MppDev dev = p_hal->dev;
527 
528     if (p_hal->fast_mode) {
529         RK_S32 index =  task->dec.reg_index;
530         hw_regs = (VP9_REGS *)hw_ctx->g_buf[index].hw_regs;
531     }
532 
533     mpp_assert(hw_regs);
534 
535     if (hal_vp9d_debug & HAL_VP9D_DBG_REG) {
536         RK_U32 *p = (RK_U32 *)hw_regs;
537         RK_U32 i = 0;
538 
539         for (i = 0; i < sizeof(VP9_REGS) / 4; i++)
540             mpp_log("set regs[%02d]: %08X\n", i, *p++);
541     }
542 
543     do {
544         MppDevRegWrCfg wr_cfg;
545         MppDevRegRdCfg rd_cfg;
546         RK_U32 reg_size = sizeof(VP9_REGS);
547 
548         wr_cfg.reg = hw_ctx->hw_regs;
549         wr_cfg.size = reg_size;
550         wr_cfg.offset = 0;
551 
552         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
553         if (ret) {
554             mpp_err_f("set register write failed %d\n", ret);
555             break;
556         }
557 
558         rd_cfg.reg = hw_ctx->hw_regs;
559         rd_cfg.size = reg_size;
560         rd_cfg.offset = 0;
561 
562         ret = mpp_dev_ioctl(dev, MPP_DEV_REG_RD, &rd_cfg);
563         if (ret) {
564             mpp_err_f("set register read failed %d\n", ret);
565             break;
566         }
567 
568         ret = mpp_dev_ioctl(dev, MPP_DEV_CMD_SEND, NULL);
569         if (ret) {
570             mpp_err_f("send cmd failed %d\n", ret);
571             break;
572         }
573     } while (0);
574 
575     (void)task;
576     return ret;
577 }
578 
hal_vp9d_rkv_wait(void * hal,HalTaskInfo * task)579 MPP_RET hal_vp9d_rkv_wait(void *hal, HalTaskInfo *task)
580 {
581     MPP_RET ret = MPP_OK;
582     HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
583     Vp9dRkvCtx *hw_ctx = (Vp9dRkvCtx*)p_hal->hw_ctx;
584     VP9_REGS *hw_regs = (VP9_REGS *)hw_ctx->hw_regs;
585 
586     if (p_hal->fast_mode)
587         hw_regs = (VP9_REGS *)hw_ctx->g_buf[task->dec.reg_index].hw_regs;
588 
589     mpp_assert(hw_regs);
590 
591     ret = mpp_dev_ioctl(p_hal->dev, MPP_DEV_CMD_POLL, NULL);
592     if (ret)
593         mpp_err_f("poll cmd failed %d\n", ret);
594 
595     if (hal_vp9d_debug & HAL_VP9D_DBG_REG) {
596         RK_U32 *p = (RK_U32 *)hw_regs;
597         RK_U32 i = 0;
598 
599         for (i = 0; i < sizeof(VP9_REGS) / 4; i++)
600             mpp_log("get regs[%02d]: %08X\n", i, *p++);
601     }
602 
603     if (task->dec.flags.parse_err ||
604         task->dec.flags.ref_err ||
605         !hw_regs->swreg1_int.sw_dec_rdy_sta) {
606         MppFrame mframe = NULL;
607         mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
608         mpp_frame_set_errinfo(mframe, 1);
609     }
610 
611     if (p_hal->dec_cb && task->dec.flags.wait_done) {
612         DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)task->dec.syntax.data;
613 
614         hal_vp9d_update_counts(mpp_buffer_get_ptr(hw_ctx->count_base), task->dec.syntax.data);
615 
616         mpp_callback(p_hal->dec_cb, &pic_param->counts);
617     }
618     if (p_hal->fast_mode) {
619         hw_ctx->g_buf[task->dec.reg_index].use_flag = 0;
620     }
621 
622     (void)task;
623     return ret;
624 }
625 
hal_vp9d_rkv_reset(void * hal)626 MPP_RET hal_vp9d_rkv_reset(void *hal)
627 {
628     HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
629     Vp9dRkvCtx *hw_ctx = (Vp9dRkvCtx*)p_hal->hw_ctx;
630 
631     hal_vp9d_enter();
632 
633     memset(&hw_ctx->ls_info, 0, sizeof(hw_ctx->ls_info));
634     hw_ctx->mv_base_addr = -1;
635     hw_ctx->pre_mv_base_addr = -1;
636     hw_ctx->last_segid_flag = 1;
637 
638     hal_vp9d_leave();
639 
640     return MPP_OK;
641 }
642 
hal_vp9d_rkv_flush(void * hal)643 MPP_RET hal_vp9d_rkv_flush(void *hal)
644 {
645     HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
646     Vp9dRkvCtx *hw_ctx = p_hal->hw_ctx;
647 
648     hal_vp9d_enter();
649 
650     hw_ctx->mv_base_addr = -1;
651     hw_ctx->pre_mv_base_addr = -1;
652 
653     hal_vp9d_leave();
654 
655     return MPP_OK;
656 }
657 
hal_vp9d_rkv_control(void * hal,MpiCmd cmd_type,void * param)658 MPP_RET hal_vp9d_rkv_control(void *hal, MpiCmd cmd_type, void *param)
659 {
660     switch ((MpiCmd)cmd_type) {
661     case MPP_DEC_SET_FRAME_INFO: {
662         /* commit buffer stride */
663         RK_U32 width = mpp_frame_get_width((MppFrame)param);
664         RK_U32 height = mpp_frame_get_height((MppFrame)param);
665 
666         mpp_frame_set_hor_stride((MppFrame)param, vp9_hor_align(width));
667         mpp_frame_set_ver_stride((MppFrame)param, vp9_ver_align(height));
668     } break;
669     default: {
670     } break;
671     }
672     (void)hal;
673 
674     return MPP_OK;
675 }
676 
/* Entry table registering this rkdec VP9 hal with the mpp hal layer. */
const MppHalApi hal_vp9d_rkv = {
    .name = "vp9d_rkdec",
    .type = MPP_CTX_DEC,
    .coding = MPP_VIDEO_CodingVP9,
    .ctx_size = sizeof(HalVp9dCtx),
    .flag = 0,
    .init = hal_vp9d_rkv_init,
    .deinit = hal_vp9d_rkv_deinit,
    .reg_gen = hal_vp9d_rkv_gen_regs,
    .start = hal_vp9d_rkv_start,
    .wait = hal_vp9d_rkv_wait,
    .reset = hal_vp9d_rkv_reset,
    .flush = hal_vp9d_rkv_flush,
    .control = hal_vp9d_rkv_control,
};
692