1 /*
2 * Copyright 2022 Rockchip Electronics Co. LTD
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define MODULE_TAG "hal_vp9d_vdpu382"
18
19 #include <stdio.h>
20 #include <string.h>
21
22 #include "mpp_env.h"
23 #include "mpp_mem.h"
24 #include "mpp_common.h"
25 #include "mpp_device.h"
26 #include "mpp_hal.h"
27
28 #include "hal_bufs.h"
29 #include "hal_vp9d_debug.h"
30 #include "hal_vp9d_com.h"
31 #include "hal_vp9d_vdpu382.h"
32 #include "hal_vp9d_ctx.h"
33 #include "vdpu382_vp9d.h"
34 #include "vp9d_syntax.h"
35
36 #define HW_PROB 1
37 #define VP9_CONTEXT 4
38 #define VP9_CTU_SIZE 64
39 #define PROB_SIZE_ALIGN_TO_4K MPP_ALIGN(PROB_SIZE, SZ_4K)
40 #define COUNT_SIZE_ALIGN_TO_4K MPP_ALIGN(COUNT_SIZE, SZ_4K)
41 #define MAX_SEGMAP_SIZE_ALIGN_TO_4K MPP_ALIGN(MAX_SEGMAP_SIZE, SZ_4K)
42
43 #define VDPU382_OFFSET_COUNT (PROB_SIZE_ALIGN_TO_4K)
44 #define VDPU382_PROBE_BUFFER_SIZE (PROB_SIZE_ALIGN_TO_4K + COUNT_SIZE_ALIGN_TO_4K)
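/*
 * A single probe buffer of VDPU382_PROBE_BUFFER_SIZE holds two 4K-aligned
 * regions: the probability tables filled by software at offset 0, and a
 * count region at VDPU382_OFFSET_COUNT that the hardware is expected to
 * fill with symbol counts (used as the reg167 offset below).
 */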
45
46 typedef struct Vdpu382Vp9dCtx_t {
47 Vp9dRegBuf g_buf[MAX_GEN_REG];
48 MppBuffer probe_base;
49 MppBuffer seg_base;
50 RK_U32 offset_count;
51 RK_U32 offset_segid_cur;
52 RK_U32 offset_segid_last;
53 MppBuffer prob_default_base;
54 void* hw_regs;
55 RK_S32 mv_base_addr;
56 RK_S32 pre_mv_base_addr;
57 Vp9dLastInfo ls_info;
58 /*
59 * swap between segid_cur_base & segid_last_base
60 * 0 - use segid_cur_base as last
61 * 1 - use segid_last_base as last
62 */
63 RK_U32 last_segid_flag;
64 RK_S32 width;
65 RK_S32 height;
66 /* rcb buffers info */
67 RK_S32 rcb_buf_size;
68 Vdpu382RcbInfo rcb_info[RCB_BUF_COUNT];
69 MppBuffer rcb_buf;
70 RK_U32 num_row_tiles;
71 RK_U32 bit_depth;
72 /* colmv buffers info */
73 HalBufs cmv_bufs;
74 RK_S32 mv_size;
75 RK_S32 mv_count;
76 RK_U32 prob_ctx_valid[VP9_CONTEXT];
77 MppBuffer prob_loop_base[VP9_CONTEXT];
78 RK_U32 prob_ref_poc[VP9_CONTEXT];
79 RK_U32 col_ref_poc;
80 RK_U32 segid_ref_poc;
81 } Vdpu382Vp9dCtx;
82
83 static MPP_RET hal_vp9d_alloc_res(HalVp9dCtx *hal)
84 {
85 RK_S32 i = 0;
86 RK_S32 ret = 0;
87 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
88 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
89 hw_ctx->offset_count = VDPU382_OFFSET_COUNT;
90 hw_ctx->offset_segid_cur = 0;
91 hw_ctx->offset_segid_last = MAX_SEGMAP_SIZE_ALIGN_TO_4K;
92 /* alloc common buffer */
93 for (i = 0; i < VP9_CONTEXT; i++) {
94 ret = mpp_buffer_get(p_hal->group, &hw_ctx->prob_loop_base[i], PROB_SIZE);
95 if (ret) {
96 mpp_err("vp9 probe_loop_base get buffer failed\n");
97 return ret;
98 }
99 }
100 ret = mpp_buffer_get(p_hal->group, &hw_ctx->prob_default_base, PROB_SIZE);
101 if (ret) {
102 mpp_err("vp9 probe_default_base get buffer failed\n");
103 return ret;
104 }
105 /* alloc buffer for fast mode or normal */
106 if (p_hal->fast_mode) {
107 for (i = 0; i < MAX_GEN_REG; i++) {
108 hw_ctx->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(Vdpu382Vp9dRegSet));
109 ret = mpp_buffer_get(p_hal->group, &hw_ctx->g_buf[i].probe_base, VDPU382_PROBE_BUFFER_SIZE);
110 if (ret) {
111 mpp_err("vp9 probe_base get buffer failed\n");
112 return ret;
113 }
114 }
115 } else {
116 hw_ctx->hw_regs = mpp_calloc_size(void, sizeof(Vdpu382Vp9dRegSet));
117 ret = mpp_buffer_get(p_hal->group, &hw_ctx->probe_base, VDPU382_PROBE_BUFFER_SIZE);
118 if (ret) {
119 mpp_err("vp9 probe_base get buffer failed\n");
120 return ret;
121 }
122 }
123
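/* seg_base carries two 4K-aligned segment id maps back to back: "cur" at
 * offset 0 and "last" at MAX_SEGMAP_SIZE_ALIGN_TO_4K, swapped per frame via
 * last_segid_flag */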
124 ret = mpp_buffer_get(p_hal->group, &hw_ctx->seg_base, MAX_SEGMAP_SIZE_ALIGN_TO_4K * 2);
125 if (ret) {
126 mpp_err("vp9 segid_base get buffer failed\n");
127 return ret;
128 }
129 return MPP_OK;
130 }
131
132 static MPP_RET hal_vp9d_release_res(HalVp9dCtx *hal)
133 {
134 RK_S32 i = 0;
135 RK_S32 ret = 0;
136 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
137 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
138
139 if (hw_ctx->prob_default_base) {
140 ret = mpp_buffer_put(hw_ctx->prob_default_base);
141 if (ret) {
142 mpp_err("vp9 probe_wr_base put buffer failed\n");
143 return ret;
144 }
145 }
146 for (i = 0; i < VP9_CONTEXT; i++) {
147 if (hw_ctx->prob_loop_base[i]) {
148 ret = mpp_buffer_put(hw_ctx->prob_loop_base[i]);
149 if (ret) {
150 mpp_err("vp9 probe_base put buffer failed\n");
151 return ret;
152 }
153 }
154 }
155 if (p_hal->fast_mode) {
156 for (i = 0; i < MAX_GEN_REG; i++) {
157 if (hw_ctx->g_buf[i].probe_base) {
158 ret = mpp_buffer_put(hw_ctx->g_buf[i].probe_base);
159 if (ret) {
160 mpp_err("vp9 probe_base put buffer failed\n");
161 return ret;
162 }
163 }
164 if (hw_ctx->g_buf[i].hw_regs) {
165 mpp_free(hw_ctx->g_buf[i].hw_regs);
166 hw_ctx->g_buf[i].hw_regs = NULL;
167 }
168 if (hw_ctx->g_buf[i].rcb_buf) {
169 ret = mpp_buffer_put(hw_ctx->g_buf[i].rcb_buf);
170 if (ret) {
171 mpp_err("vp9 rcb_buf[%d] put buffer failed\n", i);
172 return ret;
173 }
174 }
175 }
176 } else {
177 if (hw_ctx->probe_base) {
178 ret = mpp_buffer_put(hw_ctx->probe_base);
179 if (ret) {
180 mpp_err("vp9 probe_base put buffer failed\n");
181 return ret;
182 }
183 }
184
185 if (hw_ctx->hw_regs) {
186 mpp_free(hw_ctx->hw_regs);
187 hw_ctx->hw_regs = NULL;
188 }
189 if (hw_ctx->rcb_buf) {
190 ret = mpp_buffer_put(hw_ctx->rcb_buf);
191 if (ret) {
192 mpp_err("vp9 rcb_buf put buffer failed\n");
193 return ret;
194 }
195 }
196 }
197
198 if (hw_ctx->cmv_bufs) {
199 ret = hal_bufs_deinit(hw_ctx->cmv_bufs);
200 if (ret) {
201 mpp_err("vp9 cmv bufs deinit buffer failed\n");
202 return ret;
203 }
204 }
205
206 if (hw_ctx->seg_base) {
207 ret = mpp_buffer_put(hw_ctx->seg_base);
208 if (ret) {
209 mpp_err("vp9 seg_base put buffer failed\n");
210 return ret;
211 }
212 }
213
214 return MPP_OK;
215 }
216
217 static MPP_RET hal_vp9d_vdpu382_deinit(void *hal)
218 {
219 MPP_RET ret = MPP_OK;
220 HalVp9dCtx *p_hal = (HalVp9dCtx *)hal;
221
222 hal_vp9d_release_res(p_hal);
223
224 if (p_hal->group) {
225 ret = mpp_buffer_group_put(p_hal->group);
226 if (ret) {
227 mpp_err("vp9d group free buffer failed\n");
228 return ret;
229 }
230 }
231 MPP_FREE(p_hal->hw_ctx);
232 return MPP_OK;
233 }
234
235 static MPP_RET hal_vp9d_vdpu382_init(void *hal, MppHalCfg *cfg)
236 {
237 MPP_RET ret = MPP_OK;
238 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
239 MEM_CHECK(ret, p_hal->hw_ctx = mpp_calloc_size(void, sizeof(Vdpu382Vp9dCtx)));
240 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
241
242 hw_ctx->mv_base_addr = -1;
243 hw_ctx->pre_mv_base_addr = -1;
244 mpp_slots_set_prop(p_hal->slots, SLOTS_HOR_ALIGN, vp9_hor_align);
245 mpp_slots_set_prop(p_hal->slots, SLOTS_VER_ALIGN, vp9_ver_align);
246
247 if (p_hal->group == NULL) {
248 ret = mpp_buffer_group_get_internal(&p_hal->group, MPP_BUFFER_TYPE_ION);
249 if (ret) {
250 mpp_err("vp9 mpp_buffer_group_get failed\n");
251 goto __FAILED;
252 }
253 }
254
255 ret = hal_vp9d_alloc_res(p_hal);
256 if (ret) {
257 mpp_err("hal_vp9d_alloc_res failed\n");
258 goto __FAILED;
259 }
260
261 hw_ctx->last_segid_flag = 1;
262 {
263 // report hw_info to parser
264 const MppSocInfo *info = mpp_get_soc_info();
265 const void *hw_info = NULL;
266 RK_U32 i;
267
268 for (i = 0; i < MPP_ARRAY_ELEMS(info->dec_caps); i++) {
269 if (info->dec_caps[i] && info->dec_caps[i]->type == VPU_CLIENT_RKVDEC) {
270 hw_info = info->dec_caps[i];
271 break;
272 }
273 }
274
275 mpp_assert(hw_info);
276 cfg->hw_info = hw_info;
277 p_hal->hw_info = hw_info;
278 }
279
280 return ret;
281 __FAILED:
282 hal_vp9d_vdpu382_deinit(hal);
283 return ret;
284 }
285
286 static void vp9d_refine_rcb_size(Vdpu382RcbInfo *rcb_info,
287 Vdpu382Vp9dRegSet *vp9_hw_regs,
288 RK_S32 width, RK_S32 height, void* data)
289 {
290 RK_U32 rcb_bits = 0;
291 DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)data;
292 RK_U32 num_tiles_col = 1 << pic_param->log2_tile_cols;
293 RK_U32 bit_depth = pic_param->BitDepthMinus8Luma + 8;
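/* extra bits reserved per tile column (64 bytes per column), presumably to
 * keep each tile's row data independently aligned */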
294 RK_U32 ext_align_size = num_tiles_col * 64 * 8;
295
296 width = MPP_ALIGN(width, VP9_CTU_SIZE);
297 height = MPP_ALIGN(height, VP9_CTU_SIZE);
298 /* RCB_STRMD_ROW */
299 if (width >= 4096)
300 rcb_bits = MPP_ALIGN(width, 64) * 232 + ext_align_size;
301 else
302 rcb_bits = 0;
303 rcb_info[RCB_STRMD_ROW].size = MPP_RCB_BYTES(rcb_bits);
304
305 /* RCB_TRANSD_ROW */
306 if (width >= 8192)
307 rcb_bits = (MPP_ALIGN(width - 8192, 4) << 1) + ext_align_size;
308 else
309 rcb_bits = 0;
310 rcb_info[RCB_TRANSD_ROW].size = MPP_RCB_BYTES(rcb_bits);
311
312 /* RCB_TRANSD_COL */
313 if ((height >= 8192) && (num_tiles_col > 1))
314 rcb_bits = (MPP_ALIGN(height - 8192, 4) << 1);
315 else
316 rcb_bits = 0;
317 rcb_info[RCB_TRANSD_COL].size = MPP_RCB_BYTES(rcb_bits);
318
319 /* RCB_INTER_ROW */
320 rcb_bits = width * 36 + ext_align_size;
321 rcb_info[RCB_INTER_ROW].size = MPP_RCB_BYTES(rcb_bits);
322
323 /* RCB_INTER_COL */
324 rcb_info[RCB_INTER_COL].size = 0;
325
326 /* RCB_INTRA_ROW */
327 rcb_bits = width * 2 * 11 + ext_align_size;
328 rcb_info[RCB_INTRA_ROW].size = MPP_RCB_BYTES(rcb_bits);
329
330 /* RCB_DBLK_ROW */
331 rcb_bits = width * (0.5 + 16 * bit_depth) + num_tiles_col * 192 * bit_depth + ext_align_size;
332 rcb_info[RCB_DBLK_ROW].size = MPP_RCB_BYTES(rcb_bits);
333
334 /* RCB_SAO_ROW */
335 rcb_info[RCB_SAO_ROW].size = 0;
336
337 /* RCB_FBC_ROW */
338 if (vp9_hw_regs->common.reg012.fbc_e) {
339 rcb_bits = 8 * width * bit_depth + ext_align_size;
340 } else
341 rcb_bits = 0;
342 rcb_info[RCB_FBC_ROW].size = MPP_RCB_BYTES(rcb_bits);
343
344 /* RCB_FILT_COL */
345 if (num_tiles_col > 1) {
346 if (vp9_hw_regs->common.reg012.fbc_e) {
347 rcb_bits = height * (4 + 24 * bit_depth);
348 } else
349 rcb_bits = height * (4 + 16 * bit_depth);
350 } else
351 rcb_bits = 0;
352 rcb_info[RCB_FILT_COL].size = MPP_RCB_BYTES(rcb_bits);
353 }
354
355 static void hal_vp9d_rcb_info_update(void *hal, Vdpu382Vp9dRegSet *hw_regs, void *data)
356 {
357 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
358 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
359 DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)data;
360 RK_U32 num_tiles = pic_param->log2_tile_rows;
361 RK_U32 bit_depth = pic_param->BitDepthMinus8Luma + 8;
362 RK_S32 height = vp9_ver_align(pic_param->height);
363 RK_S32 width = vp9_ver_align(pic_param->width);
364
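/* RCB buffers are only re-allocated when the tile layout, bit depth or
 * aligned resolution changes */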
365 if (hw_ctx->num_row_tiles != num_tiles ||
366 hw_ctx->bit_depth != bit_depth ||
367 hw_ctx->width != width ||
368 hw_ctx->height != height) {
369
370 hw_ctx->rcb_buf_size = vdpu382_get_rcb_buf_size(hw_ctx->rcb_info, width, height);
371 vp9d_refine_rcb_size(hw_ctx->rcb_info, hw_regs, width, height, pic_param);
372
373 if (p_hal->fast_mode) {
374 RK_U32 i;
375
376 for (i = 0; i < MPP_ARRAY_ELEMS(hw_ctx->g_buf); i++) {
377 MppBuffer rcb_buf = hw_ctx->g_buf[i].rcb_buf;
378
379 if (rcb_buf) {
380 mpp_buffer_put(rcb_buf);
381 hw_ctx->g_buf[i].rcb_buf = NULL;
382 }
383 mpp_buffer_get(p_hal->group, &rcb_buf, hw_ctx->rcb_buf_size);
384 hw_ctx->g_buf[i].rcb_buf = rcb_buf;
385 }
386 } else {
387 MppBuffer rcb_buf = hw_ctx->rcb_buf;
388
389 if (rcb_buf) {
390 mpp_buffer_put(rcb_buf);
391 rcb_buf = NULL;
392 }
393 mpp_buffer_get(p_hal->group, &rcb_buf, hw_ctx->rcb_buf_size);
394 hw_ctx->rcb_buf = rcb_buf;
395 }
396
397 hw_ctx->num_row_tiles = num_tiles;
398 hw_ctx->bit_depth = bit_depth;
399 hw_ctx->width = width;
400 hw_ctx->height = height;
401 }
402 }
403
404 static MPP_RET hal_vp9d_vdpu382_gen_regs(void *hal, HalTaskInfo *task)
405 {
406 RK_S32 i;
407 RK_U8 bit_depth = 0;
408 RK_U32 pic_h[3] = { 0 };
409 RK_U32 ref_frame_width_y;
410 RK_U32 ref_frame_height_y;
411 RK_S32 stream_len = 0, aglin_offset = 0;
412 RK_U32 y_hor_virstride, uv_hor_virstride, y_virstride;
413 RK_U8 *bitstream = NULL;
414 MppBuffer streambuf = NULL;
415 RK_U32 sw_y_hor_virstride;
416 RK_U32 sw_uv_hor_virstride;
417 RK_U32 sw_y_virstride;
418 RK_U8 ref_idx = 0;
419 RK_U8 ref_frame_idx = 0;
420 RK_U32 *reg_ref_base = 0;
421 RK_S32 intraFlag = 0;
422 MppBuffer framebuf = NULL;
423 HalBuf *mv_buf = NULL;
424 RK_U32 fbc_en = 0;
425
426 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
427 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
428 DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)task->dec.syntax.data;
429 RK_S32 mv_size = pic_param->width * pic_param->height / 2;
430 RK_U32 frame_ctx_id = pic_param->frame_context_idx;
431
432 if (p_hal->fast_mode) {
433 for (i = 0; i < MAX_GEN_REG; i++) {
434 if (!hw_ctx->g_buf[i].use_flag) {
435 task->dec.reg_index = i;
436 hw_ctx->probe_base = hw_ctx->g_buf[i].probe_base;
437
438 hw_ctx->hw_regs = hw_ctx->g_buf[i].hw_regs;
439 hw_ctx->g_buf[i].use_flag = 1;
440 break;
441 }
442 }
443 if (i == MAX_GEN_REG) {
444 mpp_err("vp9 fast mode buf all used\n");
445 return MPP_ERR_NOMEM;
446 }
447 }
448
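/* (re)create the collocated motion vector buffers when they do not exist
 * yet or the required per-frame colmv size grows */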
449 if (hw_ctx->cmv_bufs == NULL || hw_ctx->mv_size < mv_size) {
450 size_t size = mv_size;
451
452 if (hw_ctx->cmv_bufs) {
453 hal_bufs_deinit(hw_ctx->cmv_bufs);
454 hw_ctx->cmv_bufs = NULL;
455 }
456
457 hal_bufs_init(&hw_ctx->cmv_bufs);
458 if (hw_ctx->cmv_bufs == NULL) {
459 mpp_err_f("colmv bufs init fail");
460 return MPP_NOK;
461 }
462 hw_ctx->mv_size = mv_size;
463 hw_ctx->mv_count = mpp_buf_slot_get_count(p_hal ->slots);
464 hal_bufs_setup(hw_ctx->cmv_bufs, hw_ctx->mv_count, 1, &size);
465 }
466
467 Vdpu382Vp9dRegSet *vp9_hw_regs = (Vdpu382Vp9dRegSet*)hw_ctx->hw_regs;
468 intraFlag = (!pic_param->frame_type || pic_param->intra_only);
469 stream_len = (RK_S32)mpp_packet_get_length(task->dec.input_packet);
470 memset(hw_ctx->hw_regs, 0, sizeof(Vdpu382Vp9dRegSet));
471 #if HW_PROB
472 hal_vp9d_prob_flag_delta(mpp_buffer_get_ptr(hw_ctx->probe_base), task->dec.syntax.data);
473 if (intraFlag)
474 hal_vp9d_prob_default(mpp_buffer_get_ptr(hw_ctx->prob_default_base), task->dec.syntax.data);
475
476 /* config reg103 */
477 vp9_hw_regs->vp9d_param.reg103.prob_update_en = 1;
478 vp9_hw_regs->vp9d_param.reg103.intra_only_flag = intraFlag;
479 if (!intraFlag) {
480 vp9_hw_regs->vp9d_param.reg103.txfmmode_rfsh_en = (pic_param->txmode == 4) ? 1 : 0;
481 vp9_hw_regs->vp9d_param.reg103.interp_filter_switch_en = pic_param->interp_filter == 4 ? 1 : 0;
482 }
483 vp9_hw_regs->vp9d_param.reg103.ref_mode_rfsh_en = 1;
484 vp9_hw_regs->vp9d_param.reg103.single_ref_rfsh_en = 1;
485 vp9_hw_regs->vp9d_param.reg103.comp_ref_rfsh_en = 1;
486 vp9_hw_regs->vp9d_param.reg103.inter_coef_rfsh_flag = 0;
487 vp9_hw_regs->vp9d_param.reg103.refresh_en =
488 !pic_param->error_resilient_mode && !pic_param->parallelmode;
489 vp9_hw_regs->vp9d_param.reg103.prob_save_en = pic_param->refresh_frame_context;
490 vp9_hw_regs->vp9d_param.reg103.allow_high_precision_mv = pic_param->allow_high_precision_mv;
491 vp9_hw_regs->vp9d_param.reg103.last_key_frame_flag = hw_ctx->ls_info.last_intra_only;
492
493 /* set info for multi core */
494 {
495 MppFrame mframe = NULL;
496
497 vp9_hw_regs->common.reg028.sw_poc_arb_flag = 1;
498 mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
499 vp9_hw_regs->vp9d_param.reg65.cur_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
500 // last poc
501 ref_idx = pic_param->frame_refs[0].Index7Bits;
502 ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
503 if (ref_frame_idx < 0x7f) {
504 mframe = NULL;
505 mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &mframe);
506 vp9_hw_regs->vp9d_param.reg95.last_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
507 }
508 // golden poc
509 ref_idx = pic_param->frame_refs[1].Index7Bits;
510 ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
511 if (ref_frame_idx < 0x7f) {
512 mframe = NULL;
513 mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &mframe);
514 vp9_hw_regs->vp9d_param.reg96.golden_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
515 }
516 // altref poc
517 ref_idx = pic_param->frame_refs[2].Index7Bits;
518 ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
519 if (ref_frame_idx < 0x7f) {
520 mframe = NULL;
521 mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &mframe);
522 vp9_hw_regs->vp9d_param.reg97.altref_poc = mframe ? mpp_frame_get_poc(mframe) : 0;
523 }
524 // colref poc
525 vp9_hw_regs->vp9d_param.reg98.col_ref_poc =
526 hw_ctx->col_ref_poc ? hw_ctx->col_ref_poc : vp9_hw_regs->vp9d_param.reg65.cur_poc;
527 if (pic_param->show_frame && !pic_param->show_existing_frame)
528 hw_ctx->col_ref_poc = vp9_hw_regs->vp9d_param.reg65.cur_poc;
529 // segment id ref poc
530 vp9_hw_regs->vp9d_param.reg100.segid_ref_poc = hw_ctx->segid_ref_poc;
531
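/*
 * The two halves of seg_base form a ping-pong pair: reg168 points at the
 * "last" segment id map, reg169 at the map written for the current frame,
 * and last_segid_flag (toggled below when the map is rewritten) selects
 * which half plays which role.
 */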
532 vp9_hw_regs->vp9d_addr.reg169_segidcur_base = mpp_buffer_get_fd(hw_ctx->seg_base);
533 vp9_hw_regs->vp9d_addr.reg168_segidlast_base = mpp_buffer_get_fd(hw_ctx->seg_base);
534 if (hw_ctx->last_segid_flag) {
535 mpp_dev_set_reg_offset(p_hal->dev, 168, hw_ctx->offset_segid_last);
536 mpp_dev_set_reg_offset(p_hal->dev, 169, hw_ctx->offset_segid_cur);
537 } else {
538 mpp_dev_set_reg_offset(p_hal->dev, 168, hw_ctx->offset_segid_cur);
539 mpp_dev_set_reg_offset(p_hal->dev, 169, hw_ctx->offset_segid_last);
540 }
541
542 if ((pic_param->stVP9Segments.enabled && pic_param->stVP9Segments.update_map) ||
543 (hw_ctx->ls_info.last_width != pic_param->width) ||
544 (hw_ctx->ls_info.last_height != pic_param->height) ||
545 intraFlag || pic_param->error_resilient_mode) {
546 hw_ctx->segid_ref_poc = vp9_hw_regs->vp9d_param.reg65.cur_poc;
547 hw_ctx->last_segid_flag = !hw_ctx->last_segid_flag;
548 vp9_hw_regs->vp9d_param.reg100.segid_ref_poc = 0;
549 vp9_hw_regs->vp9d_param.reg75.vp9_segment_id_update = 1;
550 } else
551 vp9_hw_regs->vp9d_param.reg75.vp9_segment_id_update = 0;
552 }
553
554 /* config last prob base and update write base */
555 {
556
557 if (intraFlag || pic_param->error_resilient_mode) {
558 if (intraFlag
559 || pic_param->error_resilient_mode
560 || (pic_param->reset_frame_context == 3)) {
561 memset(hw_ctx->prob_ctx_valid, 0, sizeof(hw_ctx->prob_ctx_valid));
562 } else if (pic_param->reset_frame_context == 2) {
563 hw_ctx->prob_ctx_valid[frame_ctx_id] = 0;
564 }
565 }
566
567 #if VP9_DUMP
568 {
569 static RK_U32 file_cnt = 0;
570 char file_name[128];
571 RK_U32 i = 0;
572 sprintf(file_name, "/data/vp9/prob_last_%d.txt", file_cnt);
573 FILE *fp = fopen(file_name, "wb");
574 RK_U32 *tmp = NULL;
575 if (hw_ctx->prob_ctx_valid[frame_ctx_id]) {
576 tmp = (RK_U32 *)mpp_buffer_get_ptr(hw_ctx->prob_loop_base[pic_param->frame_context_idx]);
577 } else {
578 tmp = (RK_U32 *)mpp_buffer_get_ptr(hw_ctx->prob_default_base);
579 }
580 for (i = 0; i < PROB_SIZE / 4; i += 2) {
581 fprintf(fp, "%08x%08x\n", tmp[i + 1], tmp[i]);
582 }
583 file_cnt++;
584 fflush(fp);
585 fclose(fp);
586 }
587 #endif
588
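/*
 * If this frame context was already refreshed by hardware, read the
 * probabilities from its per-context loop buffer, otherwise start from the
 * software default table; in both cases the updated probabilities for this
 * context are written back to prob_loop_base[frame_ctx_id].
 */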
589 if (hw_ctx->prob_ctx_valid[frame_ctx_id]) {
590 vp9_hw_regs->vp9d_addr.reg162_last_prob_base =
591 mpp_buffer_get_fd(hw_ctx->prob_loop_base[frame_ctx_id]);
592 vp9_hw_regs->common.reg028.swreg_vp9_rd_prob_idx = frame_ctx_id + 1;
593 vp9_hw_regs->vp9d_param.reg99.prob_ref_poc = hw_ctx->prob_ref_poc[frame_ctx_id];
594 } else {
595 vp9_hw_regs->vp9d_addr.reg162_last_prob_base = mpp_buffer_get_fd(hw_ctx->prob_default_base);
596 hw_ctx->prob_ctx_valid[frame_ctx_id] |= pic_param->refresh_frame_context;
597 vp9_hw_regs->common.reg028.swreg_vp9_rd_prob_idx = 0;
598 vp9_hw_regs->vp9d_param.reg99.prob_ref_poc = 0;
599 if (pic_param->refresh_frame_context)
600 hw_ctx->prob_ref_poc[frame_ctx_id] = vp9_hw_regs->vp9d_param.reg65.cur_poc;
601 }
602 vp9_hw_regs->vp9d_addr.reg172_update_prob_wr_base =
603 mpp_buffer_get_fd(hw_ctx->prob_loop_base[frame_ctx_id]);
604 vp9_hw_regs->common.reg028.swreg_vp9_wr_prob_idx = frame_ctx_id + 1;
605
606 }
607 vp9_hw_regs->vp9d_addr.reg160_delta_prob_base = mpp_buffer_get_fd(hw_ctx->probe_base);
608 #else
609 hal_vp9d_output_probe(mpp_buffer_get_ptr(hw_ctx->probe_base), task->dec.syntax.data);
610 #endif
611 vp9_hw_regs->common.reg013.cur_pic_is_idr = !pic_param->frame_type;
612 vp9_hw_regs->common.reg009.dec_mode = 2; //set as vp9 dec
613 vp9_hw_regs->common.reg016_str_len = ((stream_len + 15) & (~15)) + 0x80;
614
615 mpp_buf_slot_get_prop(p_hal ->packet_slots, task->dec.input, SLOT_BUFFER, &streambuf);
616 bitstream = mpp_buffer_get_ptr(streambuf);
617 aglin_offset = vp9_hw_regs->common.reg016_str_len - stream_len;
618 if (aglin_offset > 0) {
619 memset((void *)(bitstream + stream_len), 0, aglin_offset);
620 }
621
622 //--- calculate the yuv_frame_size and mv_size
623 bit_depth = pic_param->BitDepthMinus8Luma + 8;
624 pic_h[0] = vp9_ver_align(pic_param->height);
625 pic_h[1] = vp9_ver_align(pic_param->height) / 2;
626 pic_h[2] = pic_h[1];
627
628 {
629 MppFrame mframe = NULL;
630
631 mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
632 fbc_en = MPP_FRAME_FMT_IS_FBC(mpp_frame_get_fmt(mframe));
633
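/* for AFBC output the stride registers carry the FBC header stride (in
 * 16-byte units) and the payload start offset is derived from the header
 * area size, 4K aligned; otherwise plain raster strides are programmed */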
634 if (fbc_en) {
635 RK_U32 fbc_hdr_stride = mpp_frame_get_fbc_hdr_stride(mframe);
636 RK_U32 h = MPP_ALIGN(mpp_frame_get_height(mframe), 64);
637 RK_U32 fbd_offset = MPP_ALIGN(fbc_hdr_stride * (h + 16) / 16, SZ_4K);
638
639 vp9_hw_regs->common.reg012.fbc_e = 1;
640 vp9_hw_regs->common.reg018.y_hor_virstride = fbc_hdr_stride >> 4;
641 vp9_hw_regs->common.reg019.uv_hor_virstride = fbc_hdr_stride >> 4;
642 vp9_hw_regs->common.reg020_fbc_payload_off.payload_st_offset = fbd_offset >> 4;
643 } else {
644 sw_y_hor_virstride = (vp9_hor_align((pic_param->width * bit_depth) >> 3) >> 4);
645 sw_uv_hor_virstride = (vp9_hor_align((pic_param->width * bit_depth) >> 3) >> 4);
646 sw_y_virstride = pic_h[0] * sw_y_hor_virstride;
647
648 vp9_hw_regs->common.reg012.fbc_e = 0;
649 vp9_hw_regs->common.reg018.y_hor_virstride = sw_y_hor_virstride;
650 vp9_hw_regs->common.reg019.uv_hor_virstride = sw_uv_hor_virstride;
651 vp9_hw_regs->common.reg020_y_virstride.y_virstride = sw_y_virstride;
652 }
653 }
654 if (!pic_param->intra_only && pic_param->frame_type &&
655 !pic_param->error_resilient_mode && hw_ctx->ls_info.last_show_frame) {
656 hw_ctx->pre_mv_base_addr = hw_ctx->mv_base_addr;
657 }
658
659 mpp_buf_slot_get_prop(p_hal ->slots, task->dec.output, SLOT_BUFFER, &framebuf);
660 vp9_hw_regs->common_addr.reg130_decout_base = mpp_buffer_get_fd(framebuf);
661 vp9_hw_regs->common_addr.reg128_rlc_base = mpp_buffer_get_fd(streambuf);
662 vp9_hw_regs->common_addr.reg129_rlcwrite_base = mpp_buffer_get_fd(streambuf);
663
664 vp9_hw_regs->vp9d_addr.reg167_count_prob_base = mpp_buffer_get_fd(hw_ctx->probe_base);
665 mpp_dev_set_reg_offset(p_hal->dev, 167, hw_ctx->offset_count);
666
667 //set cur colmv base
668 mv_buf = hal_bufs_get_buf(hw_ctx->cmv_bufs, task->dec.output);
669 vp9_hw_regs->common_addr.reg131_colmv_cur_base = mpp_buffer_get_fd(mv_buf->buf[0]);
670 hw_ctx->mv_base_addr = vp9_hw_regs->common_addr.reg131_colmv_cur_base;
671 if (hw_ctx->pre_mv_base_addr < 0) {
672 hw_ctx->pre_mv_base_addr = hw_ctx->mv_base_addr;
673 }
674 vp9_hw_regs->vp9d_addr.reg170_ref_colmv_base = hw_ctx->pre_mv_base_addr;
675
676 vp9_hw_regs->vp9d_param.reg64.cprheader_offset = 0;
677 reg_ref_base = (RK_U32*)&vp9_hw_regs->vp9d_addr.reg164_ref_last_base;
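/* program geometry, strides, reconstruction base and colmv base for the
 * three references (last / golden / altref); a missing reference falls
 * back to the current decode output */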
678 for (i = 0; i < 3; i++) {
679 MppFrame frame = NULL;
680
681 ref_idx = pic_param->frame_refs[i].Index7Bits;
682 ref_frame_idx = pic_param->ref_frame_map[ref_idx].Index7Bits;
683 ref_frame_width_y = pic_param->ref_frame_coded_width[ref_idx];
684 ref_frame_height_y = pic_param->ref_frame_coded_height[ref_idx];
685 pic_h[0] = vp9_ver_align(ref_frame_height_y);
686 pic_h[1] = vp9_ver_align(ref_frame_height_y) / 2;
687
688 if (ref_frame_idx < 0x7f)
689 mpp_buf_slot_get_prop(p_hal ->slots, ref_frame_idx, SLOT_FRAME_PTR, &frame);
690
691 if (fbc_en && frame) {
692 RK_U32 fbc_hdr_stride = mpp_frame_get_fbc_hdr_stride(frame);
693 RK_U32 h = MPP_ALIGN(mpp_frame_get_height(frame), 64);
694 RK_U32 fbd_offset = MPP_ALIGN(fbc_hdr_stride * (h + 16) / 16, SZ_4K);
695
696 y_hor_virstride = uv_hor_virstride = fbc_hdr_stride >> 4;
697 y_virstride = fbd_offset;
698 } else {
699 y_hor_virstride = uv_hor_virstride = (vp9_hor_align((ref_frame_width_y * bit_depth) >> 3) >> 4);
700 y_virstride = y_hor_virstride * pic_h[0];
701 }
702
703 if (pic_param->ref_frame_map[ref_idx].Index7Bits < 0x7f) {
704 mpp_buf_slot_get_prop(p_hal ->slots, pic_param->ref_frame_map[ref_idx].Index7Bits, SLOT_BUFFER, &framebuf);
705 }
706
707 if (pic_param->ref_frame_map[ref_idx].Index7Bits < 0x7f) {
708 switch (i) {
709 case 0: {
710 vp9_hw_regs->vp9d_param.reg106.framewidth_last = ref_frame_width_y;
711 vp9_hw_regs->vp9d_param.reg107.frameheight_last = ref_frame_height_y;
712 vp9_hw_regs->vp9d_param.reg79.lastfy_hor_virstride = y_hor_virstride;
713 vp9_hw_regs->vp9d_param.reg80.lastfuv_hor_virstride = uv_hor_virstride;
714 vp9_hw_regs->vp9d_param.reg85.lastfy_virstride = y_virstride;
715 } break;
716 case 1: {
717 vp9_hw_regs->vp9d_param.reg108.framewidth_golden = ref_frame_width_y;
718 vp9_hw_regs->vp9d_param.reg109.frameheight_golden = ref_frame_height_y;
719 vp9_hw_regs->vp9d_param.reg81.goldenfy_hor_virstride = y_hor_virstride;
720 vp9_hw_regs->vp9d_param.reg82.goldenfuv_hor_virstride = uv_hor_virstride;
721 vp9_hw_regs->vp9d_param.reg86.goldeny_virstride = y_virstride;
722 } break;
723 case 2: {
724 vp9_hw_regs->vp9d_param.reg110.framewidth_alfter = ref_frame_width_y;
725 vp9_hw_regs->vp9d_param.reg111.frameheight_alfter = ref_frame_height_y;
726 vp9_hw_regs->vp9d_param.reg83.altreffy_hor_virstride = y_hor_virstride;
727 vp9_hw_regs->vp9d_param.reg84.altreffuv_hor_virstride = uv_hor_virstride;
728 vp9_hw_regs->vp9d_param.reg87.altrefy_virstride = y_virstride;
729 } break;
730 default:
731 break;
732 }
733
734 /*0 map to 11*/
735 /*1 map to 12*/
736 /*2 map to 13*/
737 if (framebuf != NULL) {
738 reg_ref_base[i] = mpp_buffer_get_fd(framebuf);
739 } else {
740 mpp_log("ref buffer address is not valid, fall back to decout base, slot index 0x%x", pic_param->ref_frame_map[ref_idx].Index7Bits);
741 reg_ref_base[i] = vp9_hw_regs->common_addr.reg130_decout_base;
742 }
743 mv_buf = hal_bufs_get_buf(hw_ctx->cmv_bufs, pic_param->ref_frame_map[ref_idx].Index7Bits);
744 vp9_hw_regs->vp9d_addr.reg181_196_ref_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]);
745 } else {
746 reg_ref_base[i] = vp9_hw_regs->common_addr.reg130_decout_base;
747 vp9_hw_regs->vp9d_addr.reg181_196_ref_colmv_base[i] = vp9_hw_regs->common_addr.reg131_colmv_cur_base;
748 }
749 }
750
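/* per-segment features carried over from the previous frame: qp delta,
 * loop filter value, reference info and skip flag, each with its enable
 * bit taken from feature_mask */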
751 for (i = 0; i < 8; i++) {
752 vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_qp_delta_en = (hw_ctx->ls_info.feature_mask[i]) & 0x1;
753 vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_qp_delta = hw_ctx->ls_info.feature_data[i][0];
754 vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_loopfitler_value_en = (hw_ctx->ls_info.feature_mask[i] >> 1) & 0x1;
755 vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_loopfilter_value = hw_ctx->ls_info.feature_data[i][1];
756 vp9_hw_regs->vp9d_param.reg67_74[i].segid_referinfo_en = (hw_ctx->ls_info.feature_mask[i] >> 2) & 0x1;
757 vp9_hw_regs->vp9d_param.reg67_74[i].segid_referinfo = hw_ctx->ls_info.feature_data[i][2];
758 vp9_hw_regs->vp9d_param.reg67_74[i].segid_frame_skip_en = (hw_ctx->ls_info.feature_mask[i] >> 3) & 0x1;
759 }
760
761 vp9_hw_regs->vp9d_param.reg67_74[0].segid_abs_delta = hw_ctx->ls_info.abs_delta_last;
762 vp9_hw_regs->vp9d_param.reg76.tx_mode = pic_param->txmode;
763 vp9_hw_regs->vp9d_param.reg76.frame_reference_mode = pic_param->refmode;
764 vp9_hw_regs->vp9d_param.reg94.ref_deltas_lastframe = 0;
765
766 if (!intraFlag) {
767 for (i = 0; i < 4; i++)
768 vp9_hw_regs->vp9d_param.reg94.ref_deltas_lastframe |= (hw_ctx->ls_info.last_ref_deltas[i] & 0x7f) << (7 * i);
769
770 for (i = 0; i < 2; i++)
771 vp9_hw_regs->vp9d_param.reg75.mode_deltas_lastframe |= (hw_ctx->ls_info.last_mode_deltas[i] & 0x7f) << (7 * i);
772 } else {
773 hw_ctx->ls_info.segmentation_enable_flag_last = 0;
774 hw_ctx->ls_info.last_intra_only = 1;
775 }
776
777 vp9_hw_regs->vp9d_param.reg75.segmentation_enable_lstframe = hw_ctx->ls_info.segmentation_enable_flag_last;
778 vp9_hw_regs->vp9d_param.reg75.last_show_frame = hw_ctx->ls_info.last_show_frame;
779 vp9_hw_regs->vp9d_param.reg75.last_intra_only = hw_ctx->ls_info.last_intra_only;
780 vp9_hw_regs->vp9d_param.reg75.last_widthheight_eqcur = (pic_param->width == hw_ctx->ls_info.last_width) && (pic_param->height == hw_ctx->ls_info.last_height);
781 vp9_hw_regs->vp9d_param.reg78.lasttile_size = stream_len - pic_param->first_partition_size;
782
783
784 if (!intraFlag) {
785 vp9_hw_regs->vp9d_param.reg88.lref_hor_scale = pic_param->mvscale[0][0];
786 vp9_hw_regs->vp9d_param.reg89.lref_ver_scale = pic_param->mvscale[0][1];
787 vp9_hw_regs->vp9d_param.reg90.gref_hor_scale = pic_param->mvscale[1][0];
788 vp9_hw_regs->vp9d_param.reg91.gref_ver_scale = pic_param->mvscale[1][1];
789 vp9_hw_regs->vp9d_param.reg92.aref_hor_scale = pic_param->mvscale[2][0];
790 vp9_hw_regs->vp9d_param.reg93.aref_ver_scale = pic_param->mvscale[2][1];
791 }
792
793 vp9_hw_regs->common.reg010.dec_e = 1;
794 vp9_hw_regs->common.reg011.buf_empty_en = 1;
795 vp9_hw_regs->common.reg011.dec_clkgate_e = 1;
796 vp9_hw_regs->common.reg011.err_head_fill_e = 1;
797 vp9_hw_regs->common.reg011.err_colmv_fill_e = 1;
798
799 vp9_hw_regs->common.reg026.inter_auto_gating_e = 1;
800 vp9_hw_regs->common.reg026.filterd_auto_gating_e = 1;
801 vp9_hw_regs->common.reg026.strmd_auto_gating_e = 1;
802 vp9_hw_regs->common.reg026.mcp_auto_gating_e = 1;
803 vp9_hw_regs->common.reg026.busifd_auto_gating_e = 1;
804 vp9_hw_regs->common.reg026.dec_ctrl_auto_gating_e = 1;
805 vp9_hw_regs->common.reg026.intra_auto_gating_e = 1;
806 vp9_hw_regs->common.reg026.mc_auto_gating_e = 1;
807 vp9_hw_regs->common.reg026.transd_auto_gating_e = 1;
808 vp9_hw_regs->common.reg026.sram_auto_gating_e = 1;
809 vp9_hw_regs->common.reg026.cru_auto_gating_e = 1;
810 vp9_hw_regs->common.reg026.reg_cfg_gating_en = 1;
811
812 vp9_hw_regs->common.reg032_timeout_threshold = 0x3ffff;
813
814 //last info update
815 hw_ctx->ls_info.abs_delta_last = pic_param->stVP9Segments.abs_delta;
816 for (i = 0 ; i < 4; i ++) {
817 hw_ctx->ls_info.last_ref_deltas[i] = pic_param->ref_deltas[i];
818 }
819
820 for (i = 0 ; i < 2; i ++) {
821 hw_ctx->ls_info.last_mode_deltas[i] = pic_param->mode_deltas[i];
822 }
823
824 for (i = 0; i < 8; i++) {
825 hw_ctx->ls_info.feature_data[i][0] = pic_param->stVP9Segments.feature_data[i][0];
826 hw_ctx->ls_info.feature_data[i][1] = pic_param->stVP9Segments.feature_data[i][1];
827 hw_ctx->ls_info.feature_data[i][2] = pic_param->stVP9Segments.feature_data[i][2];
828 hw_ctx->ls_info.feature_data[i][3] = pic_param->stVP9Segments.feature_data[i][3];
829 hw_ctx->ls_info.feature_mask[i] = pic_param->stVP9Segments.feature_mask[i];
830 }
831 if (!hw_ctx->ls_info.segmentation_enable_flag_last)
832 hw_ctx->ls_info.segmentation_enable_flag_last = pic_param->stVP9Segments.enabled;
833
834 hw_ctx->ls_info.last_show_frame = pic_param->show_frame;
835 hw_ctx->ls_info.last_width = pic_param->width;
836 hw_ctx->ls_info.last_height = pic_param->height;
837 hw_ctx->ls_info.last_intra_only = (!pic_param->frame_type || pic_param->intra_only);
838 hal_vp9d_dbg_par("stVP9Segments.enabled %d show_frame %d width %d height %d last_intra_only %d",
839 pic_param->stVP9Segments.enabled, pic_param->show_frame,
840 pic_param->width, pic_param->height,
841 hw_ctx->ls_info.last_intra_only);
842
843 hal_vp9d_rcb_info_update(hal, vp9_hw_regs, pic_param);
844 {
845 MppBuffer rcb_buf = NULL;
846
847 rcb_buf = p_hal->fast_mode ? hw_ctx->g_buf[task->dec.reg_index].rcb_buf : hw_ctx->rcb_buf;
848 vdpu382_setup_rcb(&vp9_hw_regs->common_addr, p_hal->dev, rcb_buf, hw_ctx->rcb_info);
849 }
850
851 {
852 MppFrame mframe = NULL;
853
854 mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
855 if (mpp_frame_get_thumbnail_en(mframe)) {
856 vp9_hw_regs->vp9d_addr.reg198_scale_down_luma_base =
857 vp9_hw_regs->common_addr.reg130_decout_base;
858 vp9_hw_regs->vp9d_addr.reg199_scale_down_chorme_base =
859 vp9_hw_regs->common_addr.reg130_decout_base;
860 vdpu382_setup_down_scale(mframe, p_hal->dev, &vp9_hw_regs->common);
861 } else {
862 vp9_hw_regs->vp9d_addr.reg198_scale_down_luma_base = 0;
863 vp9_hw_regs->vp9d_addr.reg199_scale_down_chorme_base = 0;
864 vp9_hw_regs->common.reg012.scale_down_en = 0;
865 }
866 }
867 vdpu382_setup_statistic(&vp9_hw_regs->common, &vp9_hw_regs->statistic);
868
869 // whether the counts need to be updated after this frame (must wait for hw done)
870 if (pic_param->refresh_frame_context && !pic_param->parallelmode) {
871 task->dec.flags.wait_done = 1;
872 }
873
874 return MPP_OK;
875 }
876
877 static MPP_RET hal_vp9d_vdpu382_start(void *hal, HalTaskInfo *task)
878 {
879 MPP_RET ret = MPP_OK;
880 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
881 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
882 Vdpu382Vp9dRegSet *hw_regs = (Vdpu382Vp9dRegSet *)hw_ctx->hw_regs;
883 MppDev dev = p_hal->dev;
884
885 if (p_hal->fast_mode) {
886 RK_S32 index = task->dec.reg_index;
887 hw_regs = (Vdpu382Vp9dRegSet *)hw_ctx->g_buf[index].hw_regs;
888 }
889
890 mpp_assert(hw_regs);
891
892
893 #if VP9_DUMP
894 {
895 static RK_U32 file_cnt = 0;
896 char file_name[128];
897 sprintf(file_name, "/data/vp9_regs/reg_%d.txt", file_cnt);
898 FILE *fp = fopen(file_name, "wb");
899 RK_U32 i = 0;
900 RK_U32 *tmp = NULL;
901 tmp = (RK_U32 *)&hw_regs->common;
902 for (i = 0; i < sizeof(hw_regs->common) / 4; i++) {
903 fprintf(fp, "reg[%d] 0x%08x\n", i + 8, tmp[i]);
904 }
905 fprintf(fp, "\n");
906 tmp = (RK_U32 *)&hw_regs->vp9d_param;
907 for (i = 0; i < sizeof(hw_regs->vp9d_param) / 4; i++) {
908 fprintf(fp, "reg[%d] 0x%08x\n", i + 64, tmp[i]);
909 }
910 fprintf(fp, "\n");
911 tmp = (RK_U32 *)&hw_regs->common_addr;
912 for (i = 0; i < sizeof(hw_regs->common_addr) / 4; i++) {
913 fprintf(fp, "reg[%d] 0x%08x\n", i + 128, tmp[i]);
914 }
915 fprintf(fp, "\n");
916 tmp = (RK_U32 *)&hw_regs->vp9d_addr;
917 for (i = 0; i < sizeof(hw_regs->vp9d_addr) / 4; i++) {
918 fprintf(fp, "reg[%d] 0x%08x\n", i + 160, tmp[i]);
919 }
920 file_cnt++;
921 fflush(fp);
922 fclose(fp);
923 }
924 #endif
925
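/* write the register set to the driver in chunks matching the
 * Vdpu382Vp9dRegSet layout (common, codec params, common addresses, codec
 * addresses, statistic), request read-back of the interrupt registers,
 * then send the whole config to hardware */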
926 do {
927 MppDevRegWrCfg wr_cfg;
928 MppDevRegRdCfg rd_cfg;
929
930 wr_cfg.reg = &hw_regs->common;
931 wr_cfg.size = sizeof(hw_regs->common);
932 wr_cfg.offset = OFFSET_COMMON_REGS;
933
934 ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
935 if (ret) {
936 mpp_err_f("set register write failed %d\n", ret);
937 break;
938 }
939
940 wr_cfg.reg = &hw_regs->vp9d_param;
941 wr_cfg.size = sizeof(hw_regs->vp9d_param);
942 wr_cfg.offset = OFFSET_CODEC_PARAMS_REGS;
943
944 ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
945 if (ret) {
946 mpp_err_f("set register write failed %d\n", ret);
947 break;
948 }
949
950 wr_cfg.reg = &hw_regs->common_addr;
951 wr_cfg.size = sizeof(hw_regs->common_addr);
952 wr_cfg.offset = OFFSET_COMMON_ADDR_REGS;
953
954 ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
955 if (ret) {
956 mpp_err_f("set register write failed %d\n", ret);
957 break;
958 }
959
960 wr_cfg.reg = &hw_regs->vp9d_addr;
961 wr_cfg.size = sizeof(hw_regs->vp9d_addr);
962 wr_cfg.offset = OFFSET_CODEC_ADDR_REGS;
963
964 ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
965 if (ret) {
966 mpp_err_f("set register write failed %d\n", ret);
967 break;
968 }
969
970 wr_cfg.reg = &hw_regs->statistic;
971 wr_cfg.size = sizeof(hw_regs->statistic);
972 wr_cfg.offset = OFFSET_STATISTIC_REGS;
973
974 ret = mpp_dev_ioctl(dev, MPP_DEV_REG_WR, &wr_cfg);
975 if (ret) {
976 mpp_err_f("set register write failed %d\n", ret);
977 break;
978 }
979
980 rd_cfg.reg = &hw_regs->irq_status;
981 rd_cfg.size = sizeof(hw_regs->irq_status);
982 rd_cfg.offset = OFFSET_INTERRUPT_REGS;
983
984 ret = mpp_dev_ioctl(dev, MPP_DEV_REG_RD, &rd_cfg);
985 if (ret) {
986 mpp_err_f("set register read failed %d\n", ret);
987 break;
988 }
989 /* rcb info for sram */
990 vdpu382_set_rcbinfo(dev, hw_ctx->rcb_info);
991 ret = mpp_dev_ioctl(dev, MPP_DEV_CMD_SEND, NULL);
992 if (ret) {
993 mpp_err_f("send cmd failed %d\n", ret);
994 break;
995 }
996 } while (0);
997
998 (void)task;
999 return ret;
1000 }
1001
1002 static MPP_RET hal_vp9d_vdpu382_wait(void *hal, HalTaskInfo *task)
1003 {
1004 MPP_RET ret = MPP_OK;
1005 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1006 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
1007 Vdpu382Vp9dRegSet *hw_regs = (Vdpu382Vp9dRegSet *)hw_ctx->hw_regs;
1008
1009 if (p_hal->fast_mode)
1010 hw_regs = (Vdpu382Vp9dRegSet *)hw_ctx->g_buf[task->dec.reg_index].hw_regs;
1011
1012 mpp_assert(hw_regs);
1013
1014 ret = mpp_dev_ioctl(p_hal->dev, MPP_DEV_CMD_POLL, NULL);
1015 if (ret)
1016 mpp_err_f("poll cmd failed %d\n", ret);
1017
1018 if (hal_vp9d_debug & HAL_VP9D_DBG_REG) {
1019 RK_U32 *p = (RK_U32 *)hw_regs;
1020 RK_U32 i = 0;
1021
1022 for (i = 0; i < sizeof(Vdpu382Vp9dRegSet) / 4; i++)
1023 mpp_log("get regs[%02d]: %08X\n", i, *p++);
1024 }
1025
1026 if (task->dec.flags.parse_err ||
1027 task->dec.flags.ref_err ||
1028 !hw_regs->irq_status.reg224.dec_rdy_sta) {
1029 MppFrame mframe = NULL;
1030 mpp_buf_slot_get_prop(p_hal->slots, task->dec.output, SLOT_FRAME_PTR, &mframe);
1031 mpp_frame_set_errinfo(mframe, 1);
1032 }
1033 #if !HW_PROB
1034 if (p_hal->dec_cb && task->dec.flags.wait_done) {
1035 DXVA_PicParams_VP9 *pic_param = (DXVA_PicParams_VP9*)task->dec.syntax.data;
1036 hal_vp9d_update_counts(mpp_buffer_get_ptr(hw_ctx->count_base), task->dec.syntax.data);
1037 mpp_callback(p_hal->dec_cb, &pic_param->counts);
1038 }
1039 #endif
1040 if (p_hal->fast_mode) {
1041 hw_ctx->g_buf[task->dec.reg_index].use_flag = 0;
1042 }
1043
1044 (void)task;
1045 return ret;
1046 }
1047
1048 static MPP_RET hal_vp9d_vdpu382_reset(void *hal)
1049 {
1050 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1051 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
1052
1053 hal_vp9d_enter();
1054
1055 memset(&hw_ctx->ls_info, 0, sizeof(hw_ctx->ls_info));
1056 hw_ctx->mv_base_addr = -1;
1057 hw_ctx->pre_mv_base_addr = -1;
1058 hw_ctx->last_segid_flag = 1;
1059 memset(&hw_ctx->prob_ref_poc, 0, sizeof(hw_ctx->prob_ref_poc));
1060 hw_ctx->col_ref_poc = 0;
1061 hw_ctx->segid_ref_poc = 0;
1062
1063 hal_vp9d_leave();
1064
1065 return MPP_OK;
1066 }
1067
1068 static MPP_RET hal_vp9d_vdpu382_flush(void *hal)
1069 {
1070 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1071 Vdpu382Vp9dCtx *hw_ctx = (Vdpu382Vp9dCtx*)p_hal->hw_ctx;
1072
1073 hal_vp9d_enter();
1074
1075 hw_ctx->mv_base_addr = -1;
1076 hw_ctx->pre_mv_base_addr = -1;
1077
1078 hal_vp9d_leave();
1079
1080 return MPP_OK;
1081 }
1082
1083 static MPP_RET hal_vp9d_vdpu382_control(void *hal, MpiCmd cmd_type, void *param)
1084 {
1085 HalVp9dCtx *p_hal = (HalVp9dCtx*)hal;
1086
1087 switch ((MpiCmd)cmd_type) {
1088 case MPP_DEC_SET_FRAME_INFO : {
1089 /* commit buffer stride */
1090 RK_U32 width = mpp_frame_get_width((MppFrame)param);
1091 RK_U32 height = mpp_frame_get_height((MppFrame)param);
1092 MppFrameFormat fmt = mpp_frame_get_fmt((MppFrame)param);
1093
1094 if (MPP_FRAME_FMT_IS_FBC(fmt)) {
1095 vdpu382_afbc_align_calc(p_hal->slots, (MppFrame)param, 0);
1096 } else {
1097 mpp_frame_set_hor_stride((MppFrame)param, vp9_hor_align(width));
1098 mpp_frame_set_ver_stride((MppFrame)param, vp9_ver_align(height));
1099 }
1100 } break;
1101 default : {
1102 } break;
1103 }
1104
1105 return MPP_OK;
1106 }
1107
1108 const MppHalApi hal_vp9d_vdpu382 = {
1109 .name = "vp9d_vdpu382",
1110 .type = MPP_CTX_DEC,
1111 .coding = MPP_VIDEO_CodingVP9,
1112 .ctx_size = sizeof(Vdpu382Vp9dCtx),
1113 .flag = 0,
1114 .init = hal_vp9d_vdpu382_init,
1115 .deinit = hal_vp9d_vdpu382_deinit,
1116 .reg_gen = hal_vp9d_vdpu382_gen_regs,
1117 .start = hal_vp9d_vdpu382_start,
1118 .wait = hal_vp9d_vdpu382_wait,
1119 .reset = hal_vp9d_vdpu382_reset,
1120 .flush = hal_vp9d_vdpu382_flush,
1121 .control = hal_vp9d_vdpu382_control,
1122 };
1123