1 /*
2 *
3 * Copyright 2015 Rockchip Electronics Co. LTD
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18 /*
19 * @file h265d_parser2_syntax.c
20 * @brief
21 * @author csy(csy@rock-chips.com)
22
23 * @version 1.0.0
24 * @history
25 * 2015.7.15 : Create
26 */
27
28 #define MODULE_TAG "H265SYNATX"
29
30 #include "h265d_parser.h"
31 #include "h265d_syntax.h"
32
33
/*
 * Pack a buffer-slot index (7 bits) and a one-bit flag into the
 * single bPicEntry byte of a DXVA picture entry; the flag occupies
 * bit 7, the index the low 7 bits.
 */
static void fill_picture_entry(DXVA_PicEntry_HEVC *pic,
                               unsigned index, unsigned flag)
{
    /* index must fit in 7 bits and flag in a single bit */
    mpp_assert((flag & 0x01) == flag && (index & 0x7f) == index);
    pic->bPicEntry = (flag << 7) | index;
}
40
get_refpic_index(const DXVA_PicParams_HEVC * pp,int surface_index)41 static RK_S32 get_refpic_index(const DXVA_PicParams_HEVC *pp, int surface_index)
42 {
43 RK_U32 i;
44 for (i = 0; i < MPP_ARRAY_ELEMS(pp->RefPicList); i++) {
45 if ((pp->RefPicList[i].bPicEntry & 0x7f) == surface_index) {
46 //mpp_err("retun %d slot_index = %d",i,surface_index);
47 return i;
48 }
49 }
50 return 0xff;
51 }
52
/*
 * Translate the parser's current SPS/PPS/slice-header state into a
 * DXVA_PicParams_HEVC structure for the hal layer. Bit positions in the
 * packed word/dword flag fields follow the DXVA HEVC picture-parameter
 * layout. Also fills the reference picture lists from the DPB and marks
 * the referenced buffer slots as hal inputs.
 */
static void fill_picture_parameters(const HEVCContext *h,
                                    DXVA_PicParams_HEVC *pp)
{
    const HEVCFrame *current_picture = h->ref;
    /* resolve the active PPS from the slice header, then its SPS */
    const HEVCPPS *pps = (HEVCPPS *)h->pps_list[h->sh.pps_id];
    const HEVCSPS *sps = (HEVCSPS *)h->sps_list[pps->sps_id];
    const ShortTermRPS *src_rps = sps->st_rps;
    Short_SPS_RPS_HEVC *dst_rps = pp->sps_st_rps;

    RK_U32 i, j;
    /* POCs of all pictures referenced by the current frame's RPS
     * NOTE(review): fixed 16 entries — assumes the combined CURR/LT
     * ref count never exceeds 16; TODO confirm against rps builder */
    RK_U32 rps_used[16];
    RK_U32 nb_rps_used;

    memset(pp, 0, sizeof(*pp));

    pp->PicWidthInMinCbsY = sps->min_cb_width;
    pp->PicHeightInMinCbsY = sps->min_cb_height;
    pp->pps_id = h->sh.pps_id;
    pp->sps_id = pps->sps_id;
    pp->vps_id = sps->vps_id;

    /* chroma format, separate planes, luma/chroma bit depth minus 8,
     * log2_max_pic_order_cnt_lsb minus 4; remaining bits left zero */
    pp->wFormatAndSequenceInfoFlags = (sps->chroma_format_idc << 0) |
                                      (sps->separate_colour_plane_flag << 2) |
                                      ((sps->bit_depth - 8) << 3) |
                                      ((sps->bit_depth - 8) << 6) |
                                      ((sps->log2_max_poc_lsb - 4) << 9) |
                                      (0 << 13) |
                                      (0 << 14) |
                                      (0 << 15);

    /* current picture: slot index with the long-term flag clear */
    fill_picture_entry(&pp->CurrPic, current_picture->slot_index, 0);

    pp->sps_max_dec_pic_buffering_minus1 = sps->temporal_layer[sps->max_sub_layers - 1].max_dec_pic_buffering - 1;
    pp->log2_min_luma_coding_block_size_minus3 = sps->log2_min_cb_size - 3;
    pp->log2_diff_max_min_luma_coding_block_size = sps->log2_diff_max_min_coding_block_size;
    pp->log2_min_transform_block_size_minus2 = sps->log2_min_tb_size - 2;
    pp->log2_diff_max_min_transform_block_size = sps->log2_max_trafo_size - sps->log2_min_tb_size;
    pp->max_transform_hierarchy_depth_inter = sps->max_transform_hierarchy_depth_inter;
    pp->max_transform_hierarchy_depth_intra = sps->max_transform_hierarchy_depth_intra;
    pp->num_short_term_ref_pic_sets = sps->nb_st_rps;
    pp->num_long_term_ref_pics_sps = sps->num_long_term_ref_pics_sps;

    pp->num_ref_idx_l0_default_active_minus1 = pps->num_ref_idx_l0_default_active - 1;
    pp->num_ref_idx_l1_default_active_minus1 = pps->num_ref_idx_l1_default_active - 1;
    pp->init_qp_minus26 = pps->pic_init_qp_minus26;

    /* slice carries its own short-term RPS (not an SPS-indexed one):
     * record its delta-POC count and coded size for the hardware */
    if (h->sh.short_term_ref_pic_set_sps_flag == 0 && h->sh.short_term_rps) {
        pp->ucNumDeltaPocsOfRefRpsIdx = h->sh.short_term_rps->rps_idx_num_delta_pocs;
        pp->wNumBitsForShortTermRPSInSlice = h->sh.short_term_ref_pic_set_size;
    }

    /* SPS coding tools (bits 0-19) and PPS tools (bits 20-26) packed
     * into one dword; PCM-derived fields are zero when PCM is off */
    pp->dwCodingParamToolFlags = (sps->scaling_list_enable_flag << 0) |
                                 (sps->amp_enabled_flag << 1) |
                                 (sps->sao_enabled << 2) |
                                 (sps->pcm_enabled_flag << 3) |
                                 ((sps->pcm_enabled_flag ? (sps->pcm.bit_depth - 1) : 0) << 4) |
                                 ((sps->pcm_enabled_flag ? (sps->pcm.bit_depth_chroma - 1) : 0) << 8) |
                                 ((sps->pcm_enabled_flag ? (sps->pcm.log2_min_pcm_cb_size - 3) : 0) << 12) |
                                 ((sps->pcm_enabled_flag ? (sps->pcm.log2_max_pcm_cb_size - sps->pcm.log2_min_pcm_cb_size) : 0) << 14) |
                                 (sps->pcm.loop_filter_disable_flag << 16) |
                                 (sps->long_term_ref_pics_present_flag << 17) |
                                 (sps->sps_temporal_mvp_enabled_flag << 18) |
                                 (sps->sps_strong_intra_smoothing_enable_flag << 19) |
                                 (pps->dependent_slice_segments_enabled_flag << 20) |
                                 (pps->output_flag_present_flag << 21) |
                                 (pps->num_extra_slice_header_bits << 22) |
                                 (pps->sign_data_hiding_flag << 25) |
                                 (pps->cabac_init_present_flag << 26) |
                                 (0 << 27);

    /* PPS picture-property flags, one bit each */
    pp->dwCodingSettingPicturePropertyFlags = (pps->constrained_intra_pred_flag << 0) |
                                              (pps->transform_skip_enabled_flag << 1) |
                                              (pps->cu_qp_delta_enabled_flag << 2) |
                                              (pps->pic_slice_level_chroma_qp_offsets_present_flag << 3) |
                                              (pps->weighted_pred_flag << 4) |
                                              (pps->weighted_bipred_flag << 5) |
                                              (pps->transquant_bypass_enable_flag << 6) |
                                              (pps->tiles_enabled_flag << 7) |
                                              (pps->entropy_coding_sync_enabled_flag << 8) |
                                              (pps->uniform_spacing_flag << 9) |
                                              (pps->loop_filter_across_tiles_enabled_flag << 10) |
                                              (pps->seq_loop_filter_across_slices_enabled_flag << 11) |
                                              (pps->deblocking_filter_override_enabled_flag << 12) |
                                              (pps->disable_dbf << 13) |
                                              (pps->lists_modification_present_flag << 14) |
                                              (pps->slice_header_extension_present_flag << 15) |
                                              (0 << 19);

    /* NAL types 19/20 are IDR_W_RADL/IDR_N_LP; 16..23 span the IRAP
     * range (BLA/IDR/CRA). IntraPicFlag additionally covers plain I
     * slices and the recovery-point frame used for error concealment */
    pp->IdrPicFlag = (h->first_nal_type == 19 || h->first_nal_type == 20);
    pp->IrapPicFlag = (h->first_nal_type >= 16 && h->first_nal_type <= 23);
    pp->IntraPicFlag = (h->first_nal_type >= 16 && h->first_nal_type <= 23) ||
                       (h->sh.slice_type == I_SLICE || (h->recovery.valid_flag &&
                                                        h->recovery.first_frm_valid &&
                                                        h->recovery.first_frm_id == current_picture->poc));
    pp->pps_cb_qp_offset = pps->cb_qp_offset;
    pp->pps_cr_qp_offset = pps->cr_qp_offset;
    if (pps->tiles_enabled_flag) {
        pp->num_tile_columns_minus1 = pps->num_tile_columns - 1;
        pp->num_tile_rows_minus1 = pps->num_tile_rows - 1;

        /* explicit tile geometry is only coded when spacing is not uniform */
        if (!pps->uniform_spacing_flag) {
            for (i = 0; i < (RK_U32)pps->num_tile_columns; i++)
                pp->column_width_minus1[i] = pps->bufs.column_width[i] - 1;

            for (i = 0; i < (RK_U32)pps->num_tile_rows; i++)
                pp->row_height_minus1[i] = pps->bufs.row_height[i] - 1;
        }
    }

    pp->diff_cu_qp_delta_depth = pps->diff_cu_qp_delta_depth;
    pp->pps_beta_offset_div2 = pps->beta_offset / 2;
    pp->pps_tc_offset_div2 = pps->tc_offset / 2;
    pp->log2_parallel_merge_level_minus2 = pps->log2_parallel_merge_level - 2;
    pp->slice_segment_header_extension_present_flag = pps->slice_header_extension_present_flag;
    pp->CurrPicOrderCntVal = h->poc;
    /* signal the hal that parameter sets changed and the RPS tables
     * below are valid for this frame */
    pp->ps_update_flag = h->ps_need_upate;

    if (pp->ps_update_flag) {
        /* SPS long-term reference picture candidates */
        for (i = 0; i < 32; i++) {
            pp->sps_lt_rps[i].lt_ref_pic_poc_lsb = sps->lt_ref_pic_poc_lsb_sps[i];
            pp->sps_lt_rps[i].used_by_curr_pic_lt_flag = sps->used_by_curr_pic_lt_sps_flag[i];
        }


        /* copy the SPS short-term RPS table, splitting each set's
         * delta_poc/used arrays into negative (S0) and positive (S1)
         * halves; entries beyond nb_st_rps stay zeroed by the memset */
        for (i = 0; i < 64; i++) {
            if (i < sps->nb_st_rps) {

                RK_U32 n_pics = src_rps[i].num_negative_pics;
                dst_rps[i].num_negative_pics = n_pics;
                dst_rps[i].num_positive_pics = src_rps[i].num_delta_pocs - n_pics;
                for (j = 0; j < dst_rps[i].num_negative_pics; j++) {
                    dst_rps[i].delta_poc_s0[j] = src_rps[i].delta_poc[j];
                    dst_rps[i].s0_used_flag[j] = src_rps[i].used[j];
                }

                for ( j = 0; j < dst_rps[i].num_positive_pics; j++) {
                    dst_rps[i].delta_poc_s1[j] = src_rps[i].delta_poc[j + n_pics];
                    dst_rps[i].s1_used_flag[j] = src_rps[i].used[j + n_pics];
                }
            }
        }
    }

    /* gather the POCs actually referenced by the current picture;
     * FOLL sets are follow-on pictures not used by this frame */
    nb_rps_used = 0;
    for (i = 0; i < NB_RPS_TYPE; i++) {
        for (j = 0; j < (RK_U32)h->rps[i].nb_refs; j++) {
            if ((i == ST_FOLL) || (i == LT_FOLL)) {
                ;
            } else {
                rps_used[nb_rps_used++] = h->rps[i].list[j];
            }
        }
    }
    // mpp_err("fill RefPicList from the DPB");
    // fill RefPicList from the DPB
    pp->current_poc = current_picture->poc;
    /* walk the DPB once (j persists across i) and emit each reference
     * frame whose POC appears in rps_used; unused entries get 0xff */
    for (i = 0, j = 0; i < MPP_ARRAY_ELEMS(pp->RefPicList); i++) {
        const HEVCFrame *frame = NULL;
        while (!frame && j < MPP_ARRAY_ELEMS(h->DPB)) {
            if (&h->DPB[j] != current_picture &&
                (h->DPB[j].flags & (HEVC_FRAME_FLAG_LONG_REF | HEVC_FRAME_FLAG_SHORT_REF))) {
                RK_U32 k = 0;
                for (k = 0; k < nb_rps_used; k++) { /*skip fill RefPicList no used in rps*/
                    if (rps_used[k] == (RK_U32)h->DPB[j].poc) {
                        frame = &h->DPB[j];
                    }
                }
            }
            j++;
        }

        if (frame && (frame->slot_index != 0xff)) {
            fill_picture_entry(&pp->RefPicList[i], frame->slot_index, !!(frame->flags & HEVC_FRAME_FLAG_LONG_REF));
            pp->PicOrderCntValList[i] = frame->poc;
            /* the slot is consumed by the hal as a reference input */
            mpp_buf_slot_set_flag(h->slots, frame->slot_index, SLOT_HAL_INPUT);
            h->task->refer[i] = frame->slot_index;
            //mpp_err("ref[%d] = %d",i,frame->slot_index);
        } else {
            pp->RefPicList[i].bPicEntry = 0xff;
            pp->PicOrderCntValList[i] = 0;
            h->task->refer[i] = -1;
        }
    }

/* Map one parsed RPS list into pp->ref_list as indices into
 * pp->RefPicList (via get_refpic_index); 0xff pads unused slots.
 * j persists across i so each DPB ref is consumed once. */
#define DO_REF_LIST(ref_idx, ref_list) { \
        const RefPicList *rpl = &h->rps[ref_idx]; \
        for (i = 0, j = 0; i < MPP_ARRAY_ELEMS(pp->ref_list); i++) { \
            const HEVCFrame *frame = NULL; \
            while (!frame && j < (RK_U32)rpl->nb_refs) \
                frame = rpl->ref[j++]; \
            if (frame) \
                pp->ref_list[i] = get_refpic_index(pp, frame->slot_index); \
            else \
                pp->ref_list[i] = 0xff; \
        } \
    }

    // Fill short term and long term lists
    DO_REF_LIST(ST_CURR_BEF, RefPicSetStCurrBefore);
    DO_REF_LIST(ST_CURR_AFT, RefPicSetStCurrAfter);
    DO_REF_LIST(LT_CURR, RefPicSetLtCurr);

}
256 extern RK_U8 mpp_hevc_diag_scan4x4_x[16];
257 extern RK_U8 mpp_hevc_diag_scan4x4_y[16];
258 extern RK_U8 mpp_hevc_diag_scan8x8_x[64];
259 extern RK_U8 mpp_hevc_diag_scan8x8_y[64];
260
/*
 * Convert the active scaling lists into the layout expected by the
 * hal, reordering coefficients from the parser's raster layout using
 * the diagonal scan tables. Does nothing unless scaling lists are
 * enabled in the SPS. PPS scaling data, when present, overrides the
 * SPS defaults.
 */
static void fill_scaling_lists(const HEVCContext *h, DXVA_Qmatrix_HEVC *qm)
{
    const HEVCPPS *pps = (HEVCPPS *)h->pps_list[h->sh.pps_id];
    const HEVCSPS *sps = (HEVCSPS *)h->sps_list[pps->sps_id];
    const ScalingList *sl = pps->scaling_list_data_present_flag ?
                            &pps->scaling_list : &sps->scaling_list;
    RK_U32 list, n, raster;

    if (!sps->scaling_list_enable_flag)
        return;

    memset(qm, 0, sizeof(DXVA_Qmatrix_HEVC));

    for (list = 0; list < 6; list++) {
        /* size id 0: six 4x4 lists, diagonal scan */
        for (n = 0; n < 16; n++) {
            raster = 4 * mpp_hevc_diag_scan4x4_y[n] + mpp_hevc_diag_scan4x4_x[n];
            qm->ucScalingLists0[list][n] = sl->sl[0][list][raster];
        }

        /* size ids 1-3: 8x8 coefficient sources */
        for (n = 0; n < 64; n++) {
            raster = 8 * mpp_hevc_diag_scan8x8_y[n] + mpp_hevc_diag_scan8x8_x[n];
            qm->ucScalingLists1[list][n] = sl->sl[1][list][raster];
            qm->ucScalingLists2[list][n] = sl->sl[2][list][raster];

            /* size id 3 keeps only two lists, mapped via list * 3 */
            if (list < 2)
                qm->ucScalingLists3[list][n] = sl->sl[3][list * 3][raster];
        }

        /* DC coefficients for the 16x16 and 32x32 matrices */
        qm->ucScalingListDCCoefSizeID2[list] = sl->sl_dc[0][list];

        if (list < 2)
            qm->ucScalingListDCCoefSizeID3[list] = sl->sl_dc[1][list * 3];
    }
}
293
/*
 * Initialize a short-format DXVA slice descriptor: byte offset and
 * length of the slice NAL inside the repacked bitstream buffer.
 */
static void fill_slice_short(DXVA_Slice_HEVC_Short *slice,
                             unsigned position, unsigned size)
{
    memset(slice, 0, sizeof(*slice));
    slice->wBadSliceChopping = 0;  /* whole slice present, no chopping */
    slice->BSNALunitDataLocation = position;
    slice->SliceBytesInBuffer = size;
}
302
/* Reset a slice cut-parameter descriptor to its all-zero defaults. */
static void init_slice_cut_param(DXVA_Slice_HEVC_Cut_Param *slice)
{
    memset(slice, 0, sizeof(*slice));
}
307
h265d_parser2_syntax(void * ctx)308 RK_S32 h265d_parser2_syntax(void *ctx)
309 {
310
311 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
312 const HEVCContext *h = (const HEVCContext *)h265dctx->priv_data;
313
314 h265d_dxva2_picture_context_t *ctx_pic = (h265d_dxva2_picture_context_t *)h->hal_pic_private;
315
316 /* Fill up DXVA_PicParams_HEVC */
317 fill_picture_parameters(h, &ctx_pic->pp);
318
319 /* Fill up DXVA_Qmatrix_HEVC */
320 fill_scaling_lists(h, &ctx_pic->qm);
321
322 return 0;
323 }
324
/*
 * Repack the frame's VCL NAL units into a contiguous Annex-B stream
 * (00 00 01 start codes) and build one DXVA short slice descriptor
 * plus one cut-parameter entry per slice.
 *
 * @param ctx         decoder context (H265dContext_t *)
 * @param input_index packet slot holding the destination stream
 *                    buffer, or -1 to write into h->input_packet
 *                    (which is grown when too small).
 * @return MPP_OK on success, MPP_ERR_NULL_PTR / MPP_ERR_NOMEM /
 *         MPP_ERR_STREAM on failure.
 */
RK_S32 h265d_syntax_fill_slice(void *ctx, RK_S32 input_index)
{
    H265dContext_t *h265dctx = (H265dContext_t *)ctx;
    const HEVCContext *h = (const HEVCContext *)h265dctx->priv_data;
    h265d_dxva2_picture_context_t *ctx_pic = (h265d_dxva2_picture_context_t *)h->hal_pic_private;
    MppBuffer streambuf = NULL;
    RK_S32 i, count = 0;
    RK_U32 position = 0;
    RK_U8 *ptr = NULL;
    RK_U8 *current = NULL;
    RK_U32 size = 0, length = 0;

    if (-1 != input_index) {
        /* write directly into the packet slot's stream buffer */
        mpp_buf_slot_get_prop(h->packet_slots, input_index, SLOT_BUFFER, &streambuf);
        current = ptr = (RK_U8 *)mpp_buffer_get_ptr(streambuf);
        if (current == NULL) {
            return MPP_ERR_NULL_PTR;
        }
    } else {
        /* no slot: reuse the soft input packet, reallocating it when
         * the repacked stream would not fit */
        RK_S32 buff_size = 0;
        current = (RK_U8 *)mpp_packet_get_data(h->input_packet);
        size = (RK_U32)mpp_packet_get_size(h->input_packet);
        for (i = 0; i < h->nb_nals; i++) {
            /* 4 bytes per NAL covers the 3-byte start code with margin */
            length += h->nals[i].size + 4;
        }
        length = MPP_ALIGN(length, 16) + 64;
        if (length > size) {
            mpp_free(current);
            buff_size = MPP_ALIGN(length + 10 * 1024, 1024);
            current = mpp_malloc(RK_U8, buff_size);
            if (NULL == current) {
                /* the old buffer is already freed: clear the packet's
                 * data/size so no one reuses the dangling pointer */
                mpp_packet_set_data(h->input_packet, NULL);
                mpp_packet_set_size(h->input_packet, 0);
                return MPP_ERR_NOMEM;
            }
            mpp_packet_set_data(h->input_packet, (void*)current);
            mpp_packet_set_size(h->input_packet, buff_size);
        }
    }

    /* grow the per-picture slice arrays when this frame has more NALs
     * than any previous one */
    if (ctx_pic->max_slice_num < h->nb_nals) {

        MPP_FREE(ctx_pic->slice_short);

        ctx_pic->slice_short = (DXVA_Slice_HEVC_Short *)mpp_malloc(DXVA_Slice_HEVC_Short, h->nb_nals);
        if (!ctx_pic->slice_short)
            return MPP_ERR_NOMEM;

        MPP_FREE(ctx_pic->slice_cut_param);

        ctx_pic->slice_cut_param = (DXVA_Slice_HEVC_Cut_Param *)mpp_malloc(DXVA_Slice_HEVC_Cut_Param, h->nb_nals);
        if (!ctx_pic->slice_cut_param)
            return MPP_ERR_NOMEM;

        ctx_pic->max_slice_num = h->nb_nals;
    }

    for (i = 0; i < h->nb_nals; i++) {
        static const RK_U8 start_code[] = {0, 0, 1 };
        static const RK_U32 start_code_size = sizeof(start_code);
        BitReadCtx_t gb_cxt, *gb;
        RK_S32 value;
        RK_U32 nal_type;

        /* peek the nal_unit_type: forbidden_zero_bit then 6 type bits */
        mpp_set_bitread_ctx(&gb_cxt, (RK_U8 *)h->nals[i].data,
                            h->nals[i].size);
        mpp_set_bitread_pseudo_code_type(&gb_cxt, PSEUDO_CODE_H264_H265);

        gb = &gb_cxt;

        READ_ONEBIT(gb, &value); /*this bit should be zero*/

        READ_BITS(gb, 6, &nal_type);

        /* only VCL NALs (type < 32) are handed to the hardware */
        if (nal_type >= 32) {
            continue;
        }
        memcpy(current, start_code, start_code_size);
        current += start_code_size;
        position += start_code_size;
        memcpy(current, h->nals[i].data, h->nals[i].size);
        /* the recorded slice position points past the start code */
        fill_slice_short(&ctx_pic->slice_short[count], position, h->nals[i].size);
        init_slice_cut_param(&ctx_pic->slice_cut_param[count]);
        current += h->nals[i].size;
        position += h->nals[i].size;
        count++;
    }
    ctx_pic->slice_count = count;
    ctx_pic->bitstream_size = position;
    if (-1 != input_index) {
        ctx_pic->bitstream = (RK_U8*)ptr;

        mpp_buf_slot_set_flag(h->packet_slots, input_index, SLOT_CODEC_READY);
        mpp_buf_slot_set_flag(h->packet_slots, input_index, SLOT_HAL_INPUT);
    } else {
        ctx_pic->bitstream = NULL;
        mpp_packet_set_length(h->input_packet, position);
    }
    return MPP_OK;
__BITREAD_ERR:
    /* jumped to by READ_ONEBIT/READ_BITS on bitstream exhaustion */
    return MPP_ERR_STREAM;
}
421