1 /*
2 *
3 * Copyright 2015 Rockchip Electronics Co. LTD
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18 /*
19 * @file h265d_parser.c
20 * @brief
21 * @author csy(csy@rock-chips.com)
 *
23 * @version 1.0.0
24 * @history
25 * 2015.7.15 : Create
26 */
27
28 #define MODULE_TAG "H265D_PARSER"
29
30 #include "mpp_env.h"
31 #include "mpp_mem.h"
32 #include "mpp_bitread.h"
33 #include "mpp_packet_impl.h"
34 #include "rk_hdr_meta_com.h"
35
36 #include "h265d_parser.h"
37 #include "h265d_syntax.h"
38 #include "h265d_api.h"
39 #include "h2645d_sei.h"
40
41 #define START_CODE 0x000001 ///< start_code_prefix_one_3bytes
42
43 RK_U32 h265d_debug;
44 #ifdef dump
45 FILE *fp = NULL;
46 #endif
47 //static RK_U32 start_write = 0, value = 0;
48
49 /**
50 * Find the end of the current frame in the bitstream.
51 * @return the position of the first byte of the next frame, or END_NOT_FOUND
52 */
static RK_S32 hevc_find_frame_end(SplitContext_t *sc, const RK_U8 *buf,
                                  int buf_size)
55 {
56 RK_S32 i;
57
58 for (i = 0; i < buf_size; i++) {
59 int nut, layer_id;
60
61 sc->state64 = (sc->state64 << 8) | buf[i];
62
63 if (((sc->state64 >> 3 * 8) & 0xFFFFFF) != START_CODE)
64 continue;
65 nut = (sc->state64 >> (2 * 8 + 1)) & 0x3F;
66 layer_id = (((sc->state64 >> 2 * 8) & 0x01) << 5) + (((sc->state64 >> 1 * 8) & 0xF8) >> 3);
67 //mpp_log("nut = %d layer_id = %d\n",nut,layer_id);
68 // Beginning of access unit
69 if ((nut >= NAL_VPS && nut <= NAL_AUD) || nut == NAL_SEI_PREFIX ||
70 (nut >= 41 && nut <= 44) || (nut >= 48 && nut <= 55)) {
71 if (sc->frame_start_found && !layer_id) {
72 sc->frame_start_found = 0;
73 return i - 5;
74 }
75 } else if (nut <= NAL_RASL_R ||
76 (nut >= NAL_BLA_W_LP && nut <= NAL_CRA_NUT)) {
77 int first_slice_segment_in_pic_flag = buf[i] >> 7;
78 //mpp_log("nut = %d first_slice_segment_in_pic_flag %d layer_id = %d \n",nut,
79 // first_slice_segment_in_pic_flag,
80 // layer_id);
81 if (first_slice_segment_in_pic_flag && !layer_id) {
82 if (!sc->frame_start_found) {
83 sc->frame_start_found = 1;
84 } else { // First slice of next frame found
85 sc->frame_start_found = 0;
86 return i - 5;
87 }
88 }
89 }
90 }
91 return END_NOT_FOUND;
92 }
93
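/*
 * Assemble a complete frame from successive input chunks (modeled on the
 * FFmpeg-style combine-frame logic): when no frame end was found the input is
 * appended to the internal buffer and -1 is returned; otherwise *buf and
 * *buf_size are redirected to the combined buffer and any bytes read past the
 * frame end are remembered as "overread" bytes for the next call.
 */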
static RK_S32 mpp_combine_frame(SplitContext_t *sc, RK_S32 next, const RK_U8 **buf, RK_S32 *buf_size)
95 {
96 if (sc->overread) {
97 mpp_log("overread %d, state:%X next:%d index:%d o_index:%d\n",
98 sc->overread, sc->state, next, sc->index, sc->overread_index);
99 mpp_log("%X %X %X %X\n", (*buf)[0], (*buf)[1], (*buf)[2], (*buf)[3]);
100 }
101
102 /* Copy overread bytes from last frame into buffer. */
103 for (; sc->overread > 0; sc->overread--) {
104 sc->buffer[sc->index++] = sc->buffer[sc->overread_index++];
105 }
106
107 /* flush remaining if EOF */
108 if (!*buf_size && next == END_NOT_FOUND) {
109 next = 0;
110 }
111
112 sc->last_index = sc->index;
113
    /* copy into buffer and return */
115 if (next == END_NOT_FOUND) {
116 RK_U32 min_size = (*buf_size) + sc->index + MPP_INPUT_BUFFER_PADDING_SIZE;
117 void* new_buffer;
118 if (min_size > sc->buffer_size) {
119 min_size = MPP_MAX(17 * min_size / 16 + 32, min_size);
120 new_buffer = mpp_realloc(sc->buffer, RK_U8, min_size);
121 if (!new_buffer) {
122 sc->buffer_size = 0;
123 return MPP_ERR_NOMEM;
124 }
125 sc->buffer_size = min_size;
126 sc->buffer = new_buffer;
127 }
128
129 memcpy(&sc->buffer[sc->index], *buf, *buf_size);
130 sc->index += *buf_size;
131
132 return -1;
133 }
134
135 *buf_size =
136 sc->overread_index = sc->index + next;
137
138 /* append to buffer */
139 if (sc->index) {
140 RK_U32 min_size = next + sc->index + MPP_INPUT_BUFFER_PADDING_SIZE;
141 void* new_buffer;
142 if (min_size > sc->buffer_size) {
143 min_size = MPP_MAX(17 * min_size / 16 + 32, min_size);
144 new_buffer = mpp_realloc(sc->buffer, RK_U8, min_size);
145 if (!new_buffer) {
146 sc->buffer_size = 0;
147 return MPP_ERR_NOMEM;
148 }
149 sc->buffer_size = min_size;
150 sc->buffer = new_buffer;
151 }
152
153 if (next > -MPP_INPUT_BUFFER_PADDING_SIZE)
154 memcpy(&sc->buffer[sc->index], *buf,
155 next + MPP_INPUT_BUFFER_PADDING_SIZE);
156 sc->index = 0;
157 *buf = sc->buffer;
158 }
159
160 /* store overread bytes */
161 for (; next < 0; next++) {
162 sc->state = (sc->state << 8) | sc->buffer[sc->last_index + next];
163 sc->state64 = (sc->state64 << 8) | sc->buffer[sc->last_index + next];
164 sc->overread++;
165 }
166
167 if (sc->overread) {
168 mpp_log("overread %d, state:%X next:%d index:%d o_index:%d\n",
169 sc->overread, sc->state, next, sc->index, sc->overread_index);
170 mpp_log("%X %X %X %X\n", (*buf)[0], (*buf)[1], (*buf)[2], (*buf)[3]);
171 }
172
173 return 0;
174 }
175
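/* Allocate a SplitContext_t together with its MAX_FRAME_SIZE work buffer; the
 * caller owns the context and releases it with h265d_split_deinit(). */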
static RK_S32 h265d_split_init(void **sc)
177 {
178 SplitContext_t *s = NULL;
179 if (s == NULL) {
180 s = mpp_calloc(SplitContext_t, 1);
181 if (s != NULL) {
182 *sc = s;
183 } else {
184 mpp_err("split alloc context fail");
185 return MPP_ERR_NOMEM;
186 }
187 }
188 s->buffer = mpp_malloc(RK_U8, MAX_FRAME_SIZE);
189 s->buffer_size = MAX_FRAME_SIZE;
190 s->fetch_timestamp = 1;
191 return MPP_OK;
192 }
193
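/* Look up the pts/dts of the packet descriptor that covers the current stream
 * offset (see the ring filled in h265d_split_frame) and latch them into s->pts
 * and s->dts for the frame the splitter is about to output. */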
static void mpp_fetch_timestamp(SplitContext_t *s, RK_S32 off)
195 {
196 RK_S32 i;
197
198 s->dts = s->pts = -1;
199 s->offset = 0;
200 for (i = 0; i < MPP_PARSER_PTS_NB; i++) {
201 h265d_dbg(H265D_DBG_TIME, "s->cur_offset %lld s->cur_frame_offset[%d] %lld s->frame_offset %lld s->next_frame_offset %lld",
202 s->cur_offset, i, s->cur_frame_offset[i], s->frame_offset, s->next_frame_offset);
203 if ( s->cur_offset + off >= s->cur_frame_offset[i]
204 && (s->frame_offset < s->cur_frame_offset[i] ||
205 (!s->frame_offset && !s->next_frame_offset)) // first field/frame
206 // check disabled since MPEG-TS does not send complete PES packets
207 && /*s->next_frame_offset + off <*/ s->cur_frame_end[i]) {
208 s->dts = s->cur_frame_dts[i];
209 s->pts = s->cur_frame_pts[i];
210 s->offset = s->next_frame_offset - s->cur_frame_offset[i];
211 if (s->cur_offset + off < s->cur_frame_end[i])
212 break;
213 }
214 }
215 }
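
/*
 * Feed one input packet to the frame splitter. Bytes are consumed from buf,
 * and when a complete access unit is available the function returns the number
 * of consumed bytes with *poutbuf/*poutbuf_size pointing at the frame data;
 * otherwise *poutbuf is NULL and the input has been buffered internally.
 *
 * A minimal usage sketch (variable names illustrative, mirroring the call in
 * h265d_prepare below):
 *
 *     RK_S32 consumed = h265d_split_frame(split_ctx, &out_buf, &out_size,
 *                                         pkt_buf, pkt_len, pts, dts);
 *     mpp_packet_set_pos(pkt, pkt_buf + consumed);
 *     if (out_size) {
 *         // out_buf / out_size now hold one complete frame
 *     }
 */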
static RK_S32 h265d_split_frame(void *sc,
                                const RK_U8 **poutbuf, RK_S32 *poutbuf_size,
                                const RK_U8 *buf, RK_S32 buf_size, RK_S64 pts,
                                RK_S64 dts)
220 {
221 RK_S32 next, i;
222
223 SplitContext_t *s = (SplitContext_t*)sc;
224
225 if (s->cur_offset + buf_size !=
226 s->cur_frame_end[s->cur_frame_start_index]) { /* skip remainder packets */
227 /* add a new packet descriptor */
228 i = (s->cur_frame_start_index + 1) & (MPP_PARSER_PTS_NB - 1);
229 s->cur_frame_start_index = i;
230 s->cur_frame_offset[i] = s->cur_offset;
231 s->cur_frame_end[i] = s->cur_offset + buf_size;
232 s->cur_frame_pts[i] = pts;
233 s->cur_frame_dts[i] = dts;
234 h265d_dbg(H265D_DBG_TIME, "s->cur_frame_start_index = %d,cur_frame_offset = %lld,s->cur_frame_end = %lld pts = %lld",
235 s->cur_frame_start_index, s->cur_frame_offset[i], s->cur_frame_end[i], pts);
236 }
237
238 if (s->fetch_timestamp) {
239 s->fetch_timestamp = 0;
240 s->last_pts = s->pts;
241 s->last_dts = s->dts;
242 mpp_fetch_timestamp(s, 0);
243 }
244
245 if (s->eos && !buf_size) {
246 *poutbuf = s->buffer;
247 *poutbuf_size = s->index;
248 return 0;
249 }
250
251 next = hevc_find_frame_end(s, buf, buf_size);
252 if (s->eos && buf_size && next == END_NOT_FOUND) {
253 next = buf_size;
254 }
255
256 if (mpp_combine_frame(s, next, &buf, &buf_size) < 0) {
257 *poutbuf = NULL;
258 *poutbuf_size = 0;
259 s->cur_offset += buf_size;
260 return buf_size;
261 }
262
263 *poutbuf = buf;
264 *poutbuf_size = buf_size;
265
266 if (next < 0)
267 next = 0;
268
269 if (*poutbuf_size) {
270 /* fill the data for the current frame */
271 s->frame_offset = s->next_frame_offset;
272
273 /* offset of the next frame */
274 s->next_frame_offset = s->cur_offset + next;
275 s->fetch_timestamp = 1;
276 }
277
278 s->cur_offset += next;
279 return next;
280 }
281
static RK_S32 h265d_split_reset(void *sc)
283 {
284 RK_U8 *buf = NULL;
285 RK_U32 size = 0;
286 SplitContext_t *s = (SplitContext_t*)sc;
287 if (sc == NULL) {
288 return MPP_OK;
289 }
290 buf = s->buffer;
291 size = s->buffer_size;
292 memset(s, 0, sizeof(SplitContext_t));
293 s->fetch_timestamp = 1;
294 s->buffer = buf;
295 s->buffer_size = size;
296 s->eos = 0;
297 return MPP_OK;
298 }
299
300
static RK_S32 h265d_split_deinit(void *sc)
{
    SplitContext_t *s = (SplitContext_t *)sc;
    if (s) {
        if (s->buffer) {
            mpp_free(s->buffer);
            s->buffer = NULL;
        }
        mpp_free(s);
        s = NULL;
    }
    return MPP_OK;
}
314
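/* Parse the pred_weight_table() syntax of the slice header: explicit weighted
 * prediction weights and offsets for reference list L0 and, for B slices, L1. */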
static RK_S32 pred_weight_table(HEVCContext *s, BitReadCtx_t *gb)
316 {
317 RK_U32 i = 0;
318 RK_U32 j = 0;
319 RK_U8 luma_weight_l0_flag[16];
320 RK_U8 chroma_weight_l0_flag[16];
321 RK_U8 luma_weight_l1_flag[16];
322 RK_U8 chroma_weight_l1_flag[16];
323
324 READ_UE(gb, &s->sh.luma_log2_weight_denom);
325 if (s->sps->chroma_format_idc != 0) {
326 RK_S32 delta = 0;
327 READ_SE(gb, &delta);
328 s->sh.chroma_log2_weight_denom = mpp_clip(s->sh.luma_log2_weight_denom + delta, 0, 7);
329 }
330
331 for (i = 0; i < s->sh.nb_refs[L0]; i++) {
332 READ_ONEBIT(gb, &luma_weight_l0_flag[i]);
333 if (!luma_weight_l0_flag[i]) {
334 s->sh.luma_weight_l0[i] = 1 << s->sh.luma_log2_weight_denom;
335 s->sh.luma_offset_l0[i] = 0;
336 }
337 }
338
339 if (s->sps->chroma_format_idc != 0) { // FIXME: invert "if" and "for"
340 for (i = 0; i < s->sh.nb_refs[L0]; i++) {
341 READ_ONEBIT(gb, &chroma_weight_l0_flag[i]);
342 }
343 } else {
344 for (i = 0; i < s->sh.nb_refs[L0]; i++)
345 chroma_weight_l0_flag[i] = 0;
346 }
347
348 for (i = 0; i < s->sh.nb_refs[L0]; i++) {
349 if (luma_weight_l0_flag[i]) {
350 RK_S32 delta_luma_weight_l0 = 0;
351 READ_SE(gb, &delta_luma_weight_l0);
352 s->sh.luma_weight_l0[i] = (1 << s->sh.luma_log2_weight_denom) + delta_luma_weight_l0;
353 READ_SE(gb, &s->sh.luma_offset_l0[i]);
354 }
355 if (chroma_weight_l0_flag[i]) {
356 for (j = 0; j < 2; j++) {
357 RK_S32 delta_chroma_weight_l0 = 0;
358 RK_S32 delta_chroma_offset_l0 = 0;
359 READ_SE(gb, &delta_chroma_weight_l0);
360 READ_SE(gb, &delta_chroma_offset_l0);
361 s->sh.chroma_weight_l0[i][j] = (1 << s->sh.chroma_log2_weight_denom) + delta_chroma_weight_l0;
362 s->sh.chroma_offset_l0[i][j] = mpp_clip((delta_chroma_offset_l0 - ((128 * s->sh.chroma_weight_l0[i][j])
363 >> s->sh.chroma_log2_weight_denom) + 128), -128, 127);
364 }
365 } else {
366 s->sh.chroma_weight_l0[i][0] = 1 << s->sh.chroma_log2_weight_denom;
367 s->sh.chroma_offset_l0[i][0] = 0;
368 s->sh.chroma_weight_l0[i][1] = 1 << s->sh.chroma_log2_weight_denom;
369 s->sh.chroma_offset_l0[i][1] = 0;
370 }
371 }
372
373 if (s->sh.slice_type == B_SLICE) {
374 for (i = 0; i < s->sh.nb_refs[L1]; i++) {
375 READ_ONEBIT(gb, &luma_weight_l1_flag[i]);
376 if (!luma_weight_l1_flag[i]) {
377 s->sh.luma_weight_l1[i] = 1 << s->sh.luma_log2_weight_denom;
378 s->sh.luma_offset_l1[i] = 0;
379 }
380 }
381 if (s->sps->chroma_format_idc != 0) {
382 for (i = 0; i < s->sh.nb_refs[L1]; i++)
383 READ_ONEBIT(gb, &chroma_weight_l1_flag[i]);
384 } else {
385 for (i = 0; i < s->sh.nb_refs[L1]; i++)
386 chroma_weight_l1_flag[i] = 0;
387 }
388 for (i = 0; i < s->sh.nb_refs[L1]; i++) {
389 if (luma_weight_l1_flag[i]) {
390 RK_S32 delta_luma_weight_l1 = 0;
                READ_SE(gb, &delta_luma_weight_l1); /* se(v), matching the L0 path */
392 s->sh.luma_weight_l1[i] = (1 << s->sh.luma_log2_weight_denom) + delta_luma_weight_l1;
393 READ_SE(gb, &s->sh.luma_offset_l1[i]);
394 }
395 if (chroma_weight_l1_flag[i]) {
396 for (j = 0; j < 2; j++) {
397 RK_S32 delta_chroma_weight_l1 = 0;
398 RK_S32 delta_chroma_offset_l1 = 0;
399 READ_SE(gb, &delta_chroma_weight_l1);
400 READ_SE(gb, &delta_chroma_offset_l1);
401 s->sh.chroma_weight_l1[i][j] = (1 << s->sh.chroma_log2_weight_denom) + delta_chroma_weight_l1;
402 s->sh.chroma_offset_l1[i][j] = mpp_clip((delta_chroma_offset_l1 - ((128 * s->sh.chroma_weight_l1[i][j])
403 >> s->sh.chroma_log2_weight_denom) + 128), -128, 127);
404 }
405 } else {
406 s->sh.chroma_weight_l1[i][0] = 1 << s->sh.chroma_log2_weight_denom;
407 s->sh.chroma_offset_l1[i][0] = 0;
408 s->sh.chroma_weight_l1[i][1] = 1 << s->sh.chroma_log2_weight_denom;
409 s->sh.chroma_offset_l1[i][1] = 0;
410 }
411 }
412 }
413 return 0;
414 __BITREAD_ERR:
415 return MPP_ERR_STREAM;
416 }
417
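/* Parse the long-term reference picture set signalled in the slice header and
 * add the consumed bits to s->rps_bit_offset[] for the hardware syntax. */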
static RK_S32 decode_lt_rps(HEVCContext *s, LongTermRPS *rps, BitReadCtx_t *gb)
419 {
420 const HEVCSPS *sps = s->sps;
421 RK_S32 max_poc_lsb = 1 << sps->log2_max_poc_lsb;
422 RK_S32 prev_delta_msb = 0;
423 RK_U32 nb_sps = 0, nb_sh;
424 RK_S32 i;
425
426 RK_S32 bit_begin = gb->used_bits;
427 s->rps_bit_offset[s->slice_idx] =
428 s->rps_bit_offset_st[s->slice_idx];
429
430 rps->nb_refs = 0;
431 if (!sps->long_term_ref_pics_present_flag)
432 return 0;
433
434 if (sps->num_long_term_ref_pics_sps > 0)
435 READ_UE(gb, &nb_sps);
436
437 READ_UE(gb, &nb_sh);
438
439 if (nb_sh + nb_sps > MPP_ARRAY_ELEMS(rps->poc))
440 return MPP_ERR_STREAM;
441
442 rps->nb_refs = nb_sh + nb_sps;
443
444 for (i = 0; i < rps->nb_refs; i++) {
445 RK_U8 delta_poc_msb_present;
446
447 if ((RK_U32)i < nb_sps) {
448 RK_U8 lt_idx_sps = 0;
449
450 if (sps->num_long_term_ref_pics_sps > 1)
                READ_BITS(gb, mpp_ceil_log2(sps->num_long_term_ref_pics_sps), &lt_idx_sps);
452
453 rps->poc[i] = sps->lt_ref_pic_poc_lsb_sps[lt_idx_sps];
454 rps->used[i] = sps->used_by_curr_pic_lt_sps_flag[lt_idx_sps];
455 } else {
456 READ_BITS(gb, sps->log2_max_poc_lsb, &rps->poc[i]);
457 READ_ONEBIT(gb, &rps->used[i]);
458 }
459
460 READ_ONEBIT(gb, &delta_poc_msb_present);
461 if (delta_poc_msb_present) {
462 RK_S32 delta = 0;
463
464 READ_UE(gb, &delta);
465
466 if (i && (RK_U32)i != nb_sps)
467 delta += prev_delta_msb;
468
469 rps->poc[i] += s->poc - delta * max_poc_lsb - s->sh.pic_order_cnt_lsb;
470 prev_delta_msb = delta;
471 }
472 }
473
474 s->rps_bit_offset[s->slice_idx]
475 += (gb->used_bits - bit_begin);
476
477 return 0;
478 __BITREAD_ERR:
479 return MPP_ERR_STREAM;
480 }
481
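/* Activate an SPS: propagate picture size, pixel format, bit depth and colour
 * description from the SPS (and its VPS) into the decoder context. */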
static RK_S32 set_sps(HEVCContext *s, const HEVCSPS *sps)
483 {
484 RK_U32 num = 0, den = 0;
485 MppFrameFormat fmt = s->h265dctx->cfg->base.out_fmt & (~MPP_FRAME_FMT_MASK);
486
487 s->h265dctx->coded_width = sps->width;
488 s->h265dctx->coded_height = sps->height;
489 s->h265dctx->width = sps->output_width;
490 s->h265dctx->height = sps->output_height;
491 s->h265dctx->pix_fmt = fmt | sps->pix_fmt;
492 s->h265dctx->nBitDepth = sps->bit_depth;
493 s->h265dctx->sample_aspect_ratio = sps->vui.sar;
494
495 if (sps->vui.video_signal_type_present_flag)
496 s->h265dctx->color_range = sps->vui.video_full_range_flag ?
497 MPP_FRAME_RANGE_JPEG : MPP_FRAME_RANGE_MPEG;
498 else
499 s->h265dctx->color_range = MPP_FRAME_RANGE_MPEG;
500
501 if (sps->vui.colour_description_present_flag) {
502 s->h265dctx->colorspace = sps->vui.matrix_coeffs;
503 } else {
504 s->h265dctx->colorspace = MPP_FRAME_SPC_UNSPECIFIED;
505 }
506
507 s->sps = sps;
508 s->vps = (HEVCVPS*) s->vps_list[s->sps->vps_id];
509
510 if (s->vps->vps_timing_info_present_flag) {
511 num = s->vps->vps_num_units_in_tick;
512 den = s->vps->vps_time_scale;
513 } else if (sps->vui.vui_timing_info_present_flag) {
514 num = sps->vui.vui_num_units_in_tick;
515 den = sps->vui.vui_time_scale;
516 }
517
518 if (num != 0 && den != 0) {
519 // s->h265dctx->time_base.num = num;
520 // s->h265dctx->time_base.den = den;
521 // av_reduce(&s->h265dctx->time_base.num, &s->h265dctx->time_base.den,
522 // num, den, 1 << 30);
523 }
524
525 return 0;
526
527 }
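
/* Debug helper: field-by-field comparison of the parsed slice header against
 * the one produced by the openhevc reference; returns -1 on the first mismatch.
 * Only used when h265dctx->compare_info is set. */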
static RK_S32 compare_sliceheader(SliceHeader *openhevc_sh, SliceHeader *sh)
529 {
530
531 if (openhevc_sh->pps_id != sh->pps_id) {
532 mpp_log(" pps_id diff \n");
533 return -1;
534 }
535
536 if (openhevc_sh->slice_type != sh->slice_type) {
537 mpp_log(" slice_type diff \n");
538 return -1;
539 }
540
541 if (openhevc_sh->pic_order_cnt_lsb != sh->pic_order_cnt_lsb) {
542 mpp_log(" pic_order_cnt_lsb diff \n");
543 return -1;
544 }
545
546 if (openhevc_sh->first_slice_in_pic_flag != sh->first_slice_in_pic_flag) {
547 mpp_log(" first_slice_in_pic_flag diff \n");
548 return -1;
549 }
550
551 if (openhevc_sh->dependent_slice_segment_flag != sh->dependent_slice_segment_flag) {
552 mpp_log(" dependent_slice_segment_flag diff \n");
553 return -1;
554 }
555
556 if (openhevc_sh->pic_output_flag != sh->pic_output_flag) {
557 mpp_log(" pic_output_flag diff \n");
558 return -1;
559 }
560
561 if (openhevc_sh->colour_plane_id != sh->colour_plane_id) {
562 mpp_log(" colour_plane_id diff \n");
563 return -1;
564 }
565
566 if (openhevc_sh->rpl_modification_flag[0] != sh->rpl_modification_flag[0]) {
567 mpp_log(" rpl_modification_flag[0] diff \n");
568 return -1;
569 }
570
571 if (openhevc_sh->rpl_modification_flag[1] != sh->rpl_modification_flag[1]) {
572 mpp_log(" rpl_modification_flag[1] diff \n");
573 return -1;
574 }
575
576 if (openhevc_sh->no_output_of_prior_pics_flag != sh->no_output_of_prior_pics_flag) {
577 mpp_log(" no_output_of_prior_pics_flag diff \n");
578 return -1;
579 }
580
581 if (openhevc_sh->slice_temporal_mvp_enabled_flag != sh->slice_temporal_mvp_enabled_flag) {
582 mpp_log(" slice_temporal_mvp_enabled_flag diff \n");
583 return -1;
584 }
585
586 if (openhevc_sh->nb_refs[0] != sh->nb_refs[0]) {
587 mpp_log(" nb_refs[0] diff \n");
588 return -1;
589 }
590
591 if (openhevc_sh->nb_refs[1] != sh->nb_refs[1]) {
592 mpp_log(" nb_refs[1] diff \n");
593 return -1;
594 }
595
596 if (openhevc_sh->slice_sample_adaptive_offset_flag[0] !=
597 sh->slice_sample_adaptive_offset_flag[0]) {
598 mpp_log(" slice_sample_adaptive_offset_flag[0] diff \n");
599 return -1;
600 }
601
602 if (openhevc_sh->slice_sample_adaptive_offset_flag[1] !=
603 sh->slice_sample_adaptive_offset_flag[1]) {
604 mpp_log(" slice_sample_adaptive_offset_flag[1] diff \n");
605 return -1;
606 }
607
608 if (openhevc_sh->slice_sample_adaptive_offset_flag[2] !=
609 sh->slice_sample_adaptive_offset_flag[2]) {
610 mpp_log(" slice_sample_adaptive_offset_flag[2] diff \n");
611 return -1;
612 }
613
614 if (openhevc_sh->mvd_l1_zero_flag != sh->mvd_l1_zero_flag) {
615 mpp_log(" mvd_l1_zero_flag diff \n");
616 return -1;
617 }
618 if (openhevc_sh->cabac_init_flag != sh->cabac_init_flag) {
619 mpp_log(" cabac_init_flag diff \n");
620 return -1;
621 }
622
623 if (openhevc_sh->disable_deblocking_filter_flag !=
624 sh->disable_deblocking_filter_flag) {
625 mpp_log(" disable_deblocking_filter_flag diff \n");
626 return -1;
627 }
628
629 if (openhevc_sh->slice_loop_filter_across_slices_enabled_flag !=
630 sh->slice_loop_filter_across_slices_enabled_flag) {
631 mpp_log(" slice_loop_filter_across_slices_enable diff \n");
632 return -1;
633 }
634
635 if (openhevc_sh->collocated_list != sh->collocated_list) {
636 mpp_log(" collocated_list diff \n");
637 return -1;
638 }
639
640 if (openhevc_sh->collocated_ref_idx != sh->collocated_ref_idx) {
641 mpp_log(" collocated_ref_idx diff \n");
642 return -1;
643 }
644
645 if (openhevc_sh->slice_qp_delta != sh->slice_qp_delta) {
646 mpp_log(" slice_qp_delta diff \n");
647 return -1;
648 }
649
650 if (openhevc_sh->slice_cb_qp_offset != sh->slice_cb_qp_offset) {
651 mpp_log(" slice_cb_qp_offset diff \n");
652 return -1;
653 }
654
655 if (openhevc_sh->slice_cr_qp_offset != sh->slice_cr_qp_offset) {
656 mpp_log(" slice_cr_qp_offset diff \n");
657 return -1;
658 }
659
660 if (openhevc_sh->beta_offset != sh->beta_offset) {
661 mpp_log(" beta_offset diff \n");
662 return -1;
663 }
664
665 if (openhevc_sh->tc_offset != sh->tc_offset) {
666 mpp_log(" tc_offset diff \n");
667 return -1;
668 }
669
670 if (openhevc_sh->max_num_merge_cand != sh->max_num_merge_cand) {
671 mpp_log(" max_num_merge_cand diff \n");
672 return -1;
673 }
674
675 if (openhevc_sh->num_entry_point_offsets != sh->num_entry_point_offsets) {
676 mpp_log(" num_entry_point_offsets diff \n");
677 return -1;
678 }
679
680 if (openhevc_sh->slice_qp != sh->slice_qp) {
681 mpp_log(" slice_qp diff \n");
682 return -1;
683 }
684
685 if (openhevc_sh->luma_log2_weight_denom != sh->luma_log2_weight_denom) {
686 mpp_log(" luma_log2_weight_denom diff \n");
687 return -1;
688 }
689
690 if (openhevc_sh->chroma_log2_weight_denom != sh->chroma_log2_weight_denom) {
691 mpp_log(" chroma_log2_weight_denom diff \n");
692 return -1;
693 }
694
695 /* if (openhevc_sh->slice_ctb_addr_rs != sh->slice_ctb_addr_rs) {
696 mpp_log(" slice_ctb_addr_rs diff \n");
697 return -1;
698 }*/
699 return 0;
700 }
701
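/* Parse one slice segment header. Besides filling s->sh this also tracks the
 * bit offsets of the short-term/long-term RPS and of the slice header
 * extension, which the hardware path needs to cut the stream later. */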
static RK_S32 hls_slice_header(HEVCContext *s)
703 {
704
705 BitReadCtx_t *gb = &s->HEVClc->gb;
706 SliceHeader *sh = &s->sh;
707 RK_S32 i, ret;
708 RK_S32 value;
709 RK_U32 pps_id;
710 RK_S32 bit_begin;
711
712 #ifdef JCTVC_M0458_INTERLAYER_RPS_SIG
713 int NumILRRefIdx;
714 #endif
715
716 // Coded parameters
717
718 READ_ONEBIT(gb, &sh->first_slice_in_pic_flag);
719 if ((IS_IDR(s) || IS_BLA(s)) && sh->first_slice_in_pic_flag) {
720 s->seq_decode = (s->seq_decode + 1) & 0xff;
721 s->max_ra = INT_MAX;
722 if (IS_IDR(s))
723 mpp_hevc_clear_refs(s);
724 }
725 if (s->nal_unit_type >= 16 && s->nal_unit_type <= 23)
726 READ_ONEBIT(gb, &sh->no_output_of_prior_pics_flag);
727
728 if (IS_IRAP(s) && s->miss_ref_flag && sh->first_slice_in_pic_flag) {
729 // mpp_err("s->nal_unit_type = %d s->poc %d",s->nal_unit_type,s->poc);
730 s->max_ra = INT_MAX;
731 s->miss_ref_flag = 0;
732 }
733 READ_UE(gb, &pps_id);
734
735 if (pps_id >= MAX_PPS_COUNT || !s->pps_list[pps_id]) {
736 mpp_err( "PPS id out of range: %d\n", pps_id);
737 return MPP_ERR_STREAM;
738 } else {
739 sh->pps_id = pps_id;
740 if (pps_id != s->pre_pps_id) {
741 s->ps_need_upate = 1;
742 s->pre_pps_id = pps_id;
743 }
744 }
745
746 if (!sh->first_slice_in_pic_flag &&
747 s->pps != (HEVCPPS*)s->pps_list[sh->pps_id]) {
748 mpp_err( "PPS changed between slices.\n");
749 return MPP_ERR_STREAM;
750 }
751 s->pps = (HEVCPPS*)s->pps_list[sh->pps_id];
752
753 if (s->sps != (HEVCSPS*)s->sps_list[s->pps->sps_id]) {
754 s->sps = (HEVCSPS*)s->sps_list[s->pps->sps_id];
755 mpp_hevc_clear_refs(s);
756
757 s->ps_need_upate = 1;
758 ret = set_sps(s, s->sps);
759 if (ret < 0)
760 return ret;
761
762 s->seq_decode = (s->seq_decode + 1) & 0xff;
763 s->max_ra = INT_MAX;
764 }
765
766 // s->h265dctx->profile = s->sps->ptl.general_ptl.profile_idc;
767 // s->h265dctx->level = s->sps->ptl.general_ptl.level_idc;
768
769 sh->dependent_slice_segment_flag = 0;
770 if (!sh->first_slice_in_pic_flag) {
771 RK_S32 slice_address_length;
772
773 if (s->pps->dependent_slice_segments_enabled_flag)
774 READ_ONEBIT(gb, &sh->dependent_slice_segment_flag);
775
776 slice_address_length = mpp_ceil_log2(s->sps->ctb_width *
777 s->sps->ctb_height);
778
779 READ_BITS(gb, slice_address_length, &sh->slice_segment_addr);
780
781 if (sh->slice_segment_addr >= (RK_U32)(s->sps->ctb_width * s->sps->ctb_height)) {
782 mpp_err(
783 "Invalid slice segment address: %u.\n",
784 sh->slice_segment_addr);
785 return MPP_ERR_STREAM;
786 }
787
788 if (!sh->dependent_slice_segment_flag) {
789 sh->slice_addr = sh->slice_segment_addr;
790 s->slice_idx++;
791 }
792 } else {
793 sh->slice_segment_addr = sh->slice_addr = 0;
794 s->slice_idx = 0;
795 s->slice_initialized = 0;
796 }
797
798 if (!sh->dependent_slice_segment_flag) {
799 s->slice_initialized = 0;
800
801 for (i = 0; i < s->pps->num_extra_slice_header_bits; i++)
802 SKIP_BITS(gb, 1); // slice_reserved_undetermined_flag[]
803
804 READ_UE(gb, &sh->slice_type);
805 if (!(sh->slice_type == I_SLICE ||
806 sh->slice_type == P_SLICE ||
807 sh->slice_type == B_SLICE)) {
808 mpp_err( "Unknown slice type: %d.\n",
809 sh->slice_type);
810 return MPP_ERR_STREAM;
811 }
812 if (!s->decoder_id && IS_IRAP(s) && sh->slice_type != I_SLICE) {
813 mpp_err( "Inter slices in an IRAP frame.\n");
814 return MPP_ERR_STREAM;
815 }
816
817 if (s->pps->output_flag_present_flag)
818 READ_ONEBIT(gb, &sh->pic_output_flag);
819
820 if (s->sps->separate_colour_plane_flag)
821 READ_BITS(gb, 2, &sh->colour_plane_id );
822
823 if (!IS_IDR(s)) {
824 int poc;
825
826 READ_BITS(gb, s->sps->log2_max_poc_lsb, &sh->pic_order_cnt_lsb);
827 poc = mpp_hevc_compute_poc(s, sh->pic_order_cnt_lsb);
828 if (!sh->first_slice_in_pic_flag && poc != s->poc) {
829 mpp_log("Ignoring POC change between slices: %d -> %d\n", s->poc, poc);
830 #if 0
831 if (s->h265dctx->err_recognition & AV_EF_EXPLODE)
832 return MPP_ERR_STREAM;
833 #endif
834 poc = s->poc;
835 }
836 s->poc = poc;
837
838 READ_ONEBIT(gb, &sh->short_term_ref_pic_set_sps_flag);
839
840 bit_begin = gb->used_bits;
841
842 if (!sh->short_term_ref_pic_set_sps_flag) {
843
844 ret = mpp_hevc_decode_short_term_rps(s, &sh->slice_rps, s->sps, 1);
845 if (ret < 0)
846 return ret;
847
848 sh->short_term_rps = &sh->slice_rps;
849 } else {
850 RK_S32 numbits, rps_idx;
851
852 if (!s->sps->nb_st_rps) {
853 mpp_err( "No ref lists in the SPS.\n");
854 return MPP_ERR_STREAM;
855 }
856
857 numbits = mpp_ceil_log2(s->sps->nb_st_rps);
858 rps_idx = 0;
859 if (numbits > 0)
860 READ_BITS(gb, numbits, &rps_idx);
861
862 sh->short_term_rps = &s->sps->st_rps[rps_idx];
863 }
864
865 s->rps_bit_offset_st[s->slice_idx] = gb->used_bits - bit_begin;
866
867 sh->short_term_ref_pic_set_size = s->rps_bit_offset_st[s->slice_idx];
868
869 ret = decode_lt_rps(s, &sh->long_term_rps, gb);
870 if (ret < 0) {
871 mpp_log("Invalid long term RPS.\n");
872 // if (s->h265dctx->err_recognition & AV_EF_EXPLODE)
873 // return MPP_ERR_STREAM;
874 }
875
876 if (s->sps->sps_temporal_mvp_enabled_flag)
877 READ_ONEBIT(gb, &sh->slice_temporal_mvp_enabled_flag);
878 else
879 sh->slice_temporal_mvp_enabled_flag = 0;
880 } else {
881 s->sh.short_term_rps = NULL;
882 s->poc = 0;
883 }
884
885 /* 8.3.1 */
886 if (s->temporal_id == 0 &&
887 s->nal_unit_type != NAL_TRAIL_N &&
888 s->nal_unit_type != NAL_TSA_N &&
889 s->nal_unit_type != NAL_STSA_N &&
890 s->nal_unit_type != NAL_RADL_N &&
891 s->nal_unit_type != NAL_RADL_R &&
892 s->nal_unit_type != NAL_RASL_N &&
893 s->nal_unit_type != NAL_RASL_R)
894 s->pocTid0 = s->poc;
895
896 if (s->sps->sao_enabled) {
897 READ_ONEBIT(gb, &sh->slice_sample_adaptive_offset_flag[0]);
898 READ_ONEBIT(gb, &sh->slice_sample_adaptive_offset_flag[1]);
899 sh->slice_sample_adaptive_offset_flag[2] =
900 sh->slice_sample_adaptive_offset_flag[1];
901 } else {
902 sh->slice_sample_adaptive_offset_flag[0] = 0;
903 sh->slice_sample_adaptive_offset_flag[1] = 0;
904 sh->slice_sample_adaptive_offset_flag[2] = 0;
905 }
906
907 sh->nb_refs[L0] = sh->nb_refs[L1] = 0;
908 if (sh->slice_type == P_SLICE || sh->slice_type == B_SLICE) {
909 int nb_refs;
910
911 sh->nb_refs[L0] = s->pps->num_ref_idx_l0_default_active;
912 if (sh->slice_type == B_SLICE)
913 sh->nb_refs[L1] = s->pps->num_ref_idx_l1_default_active;
914
915 READ_ONEBIT(gb, &value);
916
917 if (value) { // num_ref_idx_active_override_flag
918 READ_UE(gb, &sh->nb_refs[L0]);
919 sh->nb_refs[L0] += 1;
920 if (sh->slice_type == B_SLICE) {
921 READ_UE(gb, &sh->nb_refs[L1]);
922 sh->nb_refs[L1] += 1;
923 }
924 }
925 if (sh->nb_refs[L0] > MAX_REFS || sh->nb_refs[L1] > MAX_REFS) {
926 mpp_err( "Too many refs: %d/%d.\n",
927 sh->nb_refs[L0], sh->nb_refs[L1]);
928 return MPP_ERR_STREAM;
929 }
930
931 sh->rpl_modification_flag[0] = 0;
932 sh->rpl_modification_flag[1] = 0;
933 nb_refs = mpp_hevc_frame_nb_refs(s);
934 if (!nb_refs) {
935 mpp_err( "Zero refs for a frame with P or B slices.\n");
936 return MPP_ERR_STREAM;
937 }
938
939 if (s->pps->lists_modification_present_flag && nb_refs > 1) {
940 READ_ONEBIT(gb, &sh->rpl_modification_flag[0]);
941 if (sh->rpl_modification_flag[0]) {
942 for (i = 0; (RK_U32)i < sh->nb_refs[L0]; i++)
943 READ_BITS(gb, mpp_ceil_log2(nb_refs), &sh->list_entry_lx[0][i]);
944 }
945
946 if (sh->slice_type == B_SLICE) {
947 READ_ONEBIT(gb, &sh->rpl_modification_flag[1]);
948 if (sh->rpl_modification_flag[1] == 1)
949 for (i = 0; (RK_U32)i < sh->nb_refs[L1]; i++)
950 READ_BITS(gb, mpp_ceil_log2(nb_refs), &sh->list_entry_lx[1][i]);
951 }
952 }
953
954 if (sh->slice_type == B_SLICE)
955 READ_ONEBIT(gb, &sh->mvd_l1_zero_flag);
956
957 if (s->pps->cabac_init_present_flag)
958 READ_ONEBIT(gb, &sh->cabac_init_flag);
959 else
960 sh->cabac_init_flag = 0;
961
962 sh->collocated_ref_idx = 0;
963 if (sh->slice_temporal_mvp_enabled_flag) {
964 sh->collocated_list = L0;
965 if (sh->slice_type == B_SLICE) {
966 READ_ONEBIT(gb, &value);
967 sh->collocated_list = !value;
968 }
969
970 if (sh->nb_refs[sh->collocated_list] > 1) {
971 READ_UE(gb, &sh->collocated_ref_idx);
972 if (sh->collocated_ref_idx >= sh->nb_refs[sh->collocated_list]) {
973 mpp_err(
974 "Invalid collocated_ref_idx: %d.\n",
975 sh->collocated_ref_idx);
976 return MPP_ERR_STREAM;
977 }
978 }
979 }
980
981 if ((s->pps->weighted_pred_flag && sh->slice_type == P_SLICE) ||
982 (s->pps->weighted_bipred_flag && sh->slice_type == B_SLICE)) {
983 pred_weight_table(s, gb);
984 }
985
986 READ_UE(gb, &value);
987 sh->max_num_merge_cand = 5 - value;
988 if (sh->max_num_merge_cand < 1 || sh->max_num_merge_cand > 5) {
989 mpp_err(
990 "Invalid number of merging MVP candidates: %d.\n",
991 sh->max_num_merge_cand);
992 return MPP_ERR_STREAM;
993 }
994 }
995 READ_SE(gb, &sh->slice_qp_delta );
996 if (s->pps->pic_slice_level_chroma_qp_offsets_present_flag) {
997 READ_SE(gb, &sh->slice_cb_qp_offset);
998 READ_SE(gb, &sh->slice_cr_qp_offset);
999 } else {
1000 sh->slice_cb_qp_offset = 0;
1001 sh->slice_cr_qp_offset = 0;
1002 }
1003
1004 if (s->pps->deblocking_filter_control_present_flag) {
1005 int deblocking_filter_override_flag = 0;
1006
1007 if (s->pps->deblocking_filter_override_enabled_flag)
1008 READ_ONEBIT(gb, & deblocking_filter_override_flag);
1009
1010 if (deblocking_filter_override_flag) {
1011 READ_ONEBIT(gb, &sh->disable_deblocking_filter_flag);
1012 if (!sh->disable_deblocking_filter_flag) {
1013 READ_SE(gb, &sh->beta_offset);
1014 sh->beta_offset = sh->beta_offset * 2;
1015 READ_SE(gb, &sh->tc_offset);
1016 sh->tc_offset = sh->tc_offset * 2;
1017 }
1018 } else {
1019 sh->disable_deblocking_filter_flag = s->pps->disable_dbf;
1020 sh->beta_offset = s->pps->beta_offset;
1021 sh->tc_offset = s->pps->tc_offset;
1022 }
1023 } else {
1024 sh->disable_deblocking_filter_flag = 0;
1025 sh->beta_offset = 0;
1026 sh->tc_offset = 0;
1027 }
1028
1029 if (s->pps->seq_loop_filter_across_slices_enabled_flag &&
1030 (sh->slice_sample_adaptive_offset_flag[0] ||
1031 sh->slice_sample_adaptive_offset_flag[1] ||
1032 !sh->disable_deblocking_filter_flag)) {
1033 READ_ONEBIT(gb, &sh->slice_loop_filter_across_slices_enabled_flag);
1034 } else {
1035 sh->slice_loop_filter_across_slices_enabled_flag = s->pps->seq_loop_filter_across_slices_enabled_flag;
1036 }
1037 } else if (!s->slice_initialized) {
1038 mpp_err( "Independent slice segment missing.\n");
1039 return MPP_ERR_STREAM;
1040 }
1041
1042 sh->num_entry_point_offsets = 0;
1043 if (s->pps->tiles_enabled_flag || s->pps->entropy_coding_sync_enabled_flag) {
1044 READ_UE(gb, &sh->num_entry_point_offsets);
1045 if (s->pps->entropy_coding_sync_enabled_flag) {
1046 if (sh->num_entry_point_offsets > s->sps->ctb_height || sh->num_entry_point_offsets < 0) {
1047 mpp_err("The number of entries %d is higher than the number of CTB rows %d \n",
1048 sh->num_entry_point_offsets,
1049 s->sps->ctb_height);
1050 return MPP_ERR_STREAM;
1051 }
1052 } else {
1053 if (sh->num_entry_point_offsets > s->sps->ctb_height * s->sps->ctb_width || sh->num_entry_point_offsets < 0) {
1054 mpp_err("The number of entries %d is higher than the number of CTBs %d \n",
1055 sh->num_entry_point_offsets,
1056 s->sps->ctb_height * s->sps->ctb_width);
1057 return MPP_ERR_STREAM;
1058 }
1059 }
1060 }
1061 if (s->pps->slice_header_extension_present_flag) {
1062 //if slice_header_extension_present_flag is 1, we should cut the extension data.
1063 RK_U32 length = 0;
1064
1065 s->start_bit = gb->used_bits;
1066 READ_UE(gb, &length);
1067 for (i = 0; (RK_U32)i < length; i++) {
1068 SKIP_BITS(gb, 8); // slice_header_extension_data_byte
1069 }
1070 s->end_bit = gb->used_bits;
1071 }
1072
1073 // Inferred parameters
1074 sh->slice_qp = 26U + s->pps->pic_init_qp_minus26 + sh->slice_qp_delta;
1075 if (sh->slice_qp > 51 ||
1076 sh->slice_qp < -s->sps->qp_bd_offset) {
1077 mpp_err("The slice_qp %d is outside the valid range "
1078 "[%d, 51].\n",
1079 sh->slice_qp,
1080 -s->sps->qp_bd_offset);
1081 return MPP_ERR_STREAM;
1082 }
1083 if (s->h265dctx->compare_info != NULL && sh->first_slice_in_pic_flag) {
1084 CurrentFameInf_t *info = (CurrentFameInf_t *)s->h265dctx->compare_info;
1085 SliceHeader *openhevc_sh = (SliceHeader *)&info->sh;
1086 h265d_dbg(H265D_DBG_FUNCTION, "compare_sliceheader in");
1087 if (compare_sliceheader(openhevc_sh, &s->sh) < 0) {
1088 mpp_log("compare sliceHeader with openhevc diff\n");
1089 mpp_assert(0);
1090 }
1091 h265d_dbg(H265D_DBG_FUNCTION, "compare_sliceheader ok");
1092 }
1093
1094 sh->slice_ctb_addr_rs = sh->slice_segment_addr;
1095
1096 if (!s->sh.slice_ctb_addr_rs && s->sh.dependent_slice_segment_flag) {
1097 mpp_err("Impossible slice segment.\n");
1098 return MPP_ERR_STREAM;
1099 }
1100
1101 s->slice_initialized = 1;
1102
1103 return 0;
1104 __BITREAD_ERR:
1105 return MPP_ERR_STREAM;
1106 }
1107
/**
 * @return MPP_ERR_STREAM if the NAL unit header is invalid,
 *         otherwise the nuh_layer_id of the unit
 */
static RK_S32 hls_nal_unit(HEVCContext *s)
1113 {
1114 BitReadCtx_t*gb = &s->HEVClc->gb;
1115 RK_S32 value = 0;
1116
1117 READ_ONEBIT(gb, &value); /*this bit should be zero*/
1118
1119 READ_BITS(gb, 6, &s->nal_unit_type);
1120
1121 READ_BITS(gb, 6, &s->nuh_layer_id);
1122
1123 READ_BITS(gb, 3, &s->temporal_id);
1124
1125 s->temporal_id = s->temporal_id - 1;
1126
1127 h265d_dbg(H265D_DBG_GLOBAL,
1128 "nal_unit_type: %d, nuh_layer_id: %d temporal_id: %d\n",
1129 s->nal_unit_type, s->nuh_layer_id, s->temporal_id);
1130
1131 if (s->temporal_id < 0)
1132 return MPP_ERR_STREAM;
1133
1134 return (s->nuh_layer_id);
1135 __BITREAD_ERR:
1136 return MPP_ERR_STREAM;
1137 }
1138
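/* fast_out mode: queue the just-decoded frame for display immediately, i.e.
 * output in decoding order without DPB reordering. */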
static RK_S32 mpp_hevc_out_dec_order(void *ctx)
1140 {
1141 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1142 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
1143
1144 if (s->ref && (s->ref->flags & HEVC_FRAME_FLAG_OUTPUT)) {
1145 s->ref->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1146 mpp_buf_slot_set_flag(s->slots, s->ref->slot_index, SLOT_QUEUE_USE);
1147 mpp_buf_slot_enqueue(s->slots, s->ref->slot_index, QUEUE_DISPLAY);
1148 }
1149
1150 return 0;
1151 }
1152
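/* DPB bumping: output the frame with the smallest POC of the current output
 * sequence once enough frames are buffered (or unconditionally when flush is
 * set). Returns 1 if a frame was queued for display, 0 otherwise. */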
static RK_S32 mpp_hevc_output_frame(void *ctx, int flush)
1154 {
1155
1156 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1157 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
1158 MppDecCfgSet *cfg = h265dctx->cfg;
1159
1160 if (cfg->base.fast_out)
1161 return mpp_hevc_out_dec_order(ctx);
1162
1163 do {
1164 RK_S32 nb_output = 0;
1165 RK_S32 min_poc = INT_MAX;
1166 RK_S32 min_idx = 0;
1167 RK_U32 i;
1168
1169 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
1170 HEVCFrame *frame = &s->DPB[i];
1171 if ((frame->flags & HEVC_FRAME_FLAG_OUTPUT) &&
1172 frame->sequence == s->seq_output) {
1173 nb_output++;
1174 if (frame->poc < min_poc) {
1175 min_poc = frame->poc;
1176 min_idx = i;
1177 }
1178 }
1179 }
1180
1181 /* wait for more frames before output */
1182 if (!flush && s->seq_output == s->seq_decode && s->sps &&
1183 nb_output <= s->sps->temporal_layer[s->sps->max_sub_layers - 1].num_reorder_pics) {
1184 if (cfg->base.enable_fast_play && (IS_IDR(s) ||
1185 (IS_BLA(s) && !s->first_i_fast_play))) {
1186 s->first_i_fast_play = 1;
1187 } else {
1188 return 0;
1189 }
1190 }
1191
1192 if (nb_output) {
1193 HEVCFrame *frame = &s->DPB[min_idx];
1194
1195 frame->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1196 s->output_frame_idx = min_idx;
1197
1198 mpp_buf_slot_set_flag(s->slots, frame->slot_index, SLOT_QUEUE_USE);
1199 mpp_buf_slot_enqueue(s->slots, frame->slot_index, QUEUE_DISPLAY);
1200
1201 h265d_dbg(H265D_DBG_REF,
1202 "Output frame with POC %d frame->slot_index = %d\n", frame->poc, frame->slot_index);
1203
1204
1205 return 1;
1206 }
1207
1208 if (s->seq_output != s->seq_decode)
1209 s->seq_output = (s->seq_output + 1) & 0xff;
1210 else
1211 break;
1212 } while (1);
1213
1214 return 0;
1215 }
1216
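/* Start decoding a new picture: build the frame RPS, allocate a new reference
 * entry in the DPB for the current POC and, when references are missing,
 * either mark the frame as erroneous or (for IRAP/recovery-point pictures)
 * flush the DPB so playback can restart cleanly. */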
static RK_S32 hevc_frame_start(HEVCContext *s)
1218 {
1219 int ret;
1220
1221 if (s->ref) {
1222 mpp_log_f("found two frame in one packet do nothing!\n");
1223 return 0;
1224 }
1225
1226 s->is_decoded = 0;
1227 s->first_nal_type = s->nal_unit_type;
1228 s->miss_ref_flag = 0;
1229
1230 ret = mpp_hevc_frame_rps(s);
1231 if (ret < 0) {
1232 mpp_err("Error constructing the frame RPS.\n");
1233 goto fail;
1234 }
1235
1236 ret = mpp_hevc_set_new_ref(s, &s->frame, s->poc);
1237 if (ret < 0)
1238 goto fail;
1239
1240 if (!s->h265dctx->cfg->base.disable_error && s->miss_ref_flag) {
1241 if (!IS_IRAP(s) && (!s->recovery.valid_flag ||
1242 (s->recovery.valid_flag && s->recovery.first_frm_valid &&
1243 s->recovery.first_frm_id != s->poc))) {
1244 mpp_frame_set_errinfo(s->frame, MPP_FRAME_ERR_UNKNOW);
1245 s->ref->error_flag = 1;
1246 } else {
            /* The current IRAP frame has missing references, which usually
             * means the stream is broken: first clear the output flag of the
             * current frame, flush the remaining frames out of the DPB, then
             * restore the flag so the current frame can still be output. */
1252 HEVCFrame *frame = NULL;
1253 RK_U32 i = 0;
1254 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
1255 frame = &s->DPB[i];
1256 if (frame->poc == s->poc ) {
1257 frame->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1258 break;
1259 } else {
1260 frame = NULL;
1261 }
1262 }
1263 do {
1264 ret = mpp_hevc_output_frame(s->h265dctx, 1);
1265 } while (ret);
1266 if (frame) {
1267 frame->flags |= HEVC_FRAME_FLAG_OUTPUT;
1268 }
1269 }
1270 }
1271
1272 mpp_buf_slot_set_prop(s->slots, s->ref->slot_index, SLOT_FRAME, s->ref->frame);
1273
1274 return 0;
1275
1276 fail:
1277 s->ref = NULL;
1278 return ret;
1279 }
1280
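/* Parse a single NAL unit: decode the NAL header, then dispatch to the
 * VPS/SPS/PPS/SEI parsers or to hls_slice_header() for VCL units. */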
static RK_S32 parser_nal_unit(HEVCContext *s, const RK_U8 *nal, int length)
1282 {
1283
1284 HEVCLocalContext *lc = s->HEVClc;
1285 BitReadCtx_t *gb = &lc->gb;
1286 RK_S32 ret;
1287 mpp_set_bitread_ctx(gb, (RK_U8*)nal, length);
1288 mpp_set_bitread_pseudo_code_type(gb, PSEUDO_CODE_H264_H265);
1289 ret = hls_nal_unit(s);
1290 if (ret < 0) {
1291 mpp_err("Invalid NAL unit %d, skipping.\n",
1292 s->nal_unit_type);
1293 goto fail;
1294 } else if (ret != (s->decoder_id) && s->nal_unit_type != NAL_VPS)
1295 return 0;
1296
1297 if (s->temporal_id > s->temporal_layer_id)
1298 return 0;
1299
1300 s->nuh_layer_id = ret;
1301 h265d_dbg(H265D_DBG_GLOBAL, "s->nal_unit_type = %d,len = %d \n", s->nal_unit_type, length);
1302
1303 if (s->deny_flag && (s->nal_unit_type != NAL_VPS && s->nal_unit_type != NAL_SPS)) {
1304 ret = MPP_ERR_STREAM;
1305 goto fail;
1306 }
1307
1308 switch (s->nal_unit_type) {
1309 case NAL_VPS:
1310 ret = mpp_hevc_decode_nal_vps(s);
1311 if (ret < 0 && !s->is_decoded) {
1312 mpp_err("mpp_hevc_decode_nal_vps error ret = %d", ret);
1313 goto fail;
1314 }
1315 break;
1316 case NAL_SPS:
1317 ret = mpp_hevc_decode_nal_sps(s);
1318 if (ret < 0 && !s->is_decoded) {
1319 mpp_err("mpp_hevc_decode_nal_sps error ret = %d", ret);
1320 goto fail;
1321 }
1322
1323 s->deny_flag = 0;
1324 break;
1325 case NAL_PPS:
1326 if (s->pre_pps_data == NULL) {
1327 s->pre_pps_data = mpp_calloc(RK_U8, length + 128);
1328 memcpy(s->pre_pps_data, nal, length);
1329 s->pps_len = length;
1330 s->pps_buf_size = length + 128;
1331 s->ps_need_upate = 1;
1332 } else if (s->pps_len == length) {
1333 if (memcmp(s->pre_pps_data, nal, length)) {
1334 s->ps_need_upate = 1;
1335 memcpy(s->pre_pps_data, nal, length);
1336 }
1337 } else {
1338 if (s->pps_buf_size < length) {
1339 MPP_FREE(s->pre_pps_data);
1340 s->pre_pps_data = mpp_calloc(RK_U8, length + 128);
1341 memcpy(s->pre_pps_data, nal, length);
1342 s->pps_buf_size = length + 128;
1343 s->pps_len = length;
1344 }
1345 s->ps_need_upate = 1;
1346 }
1347 ret = mpp_hevc_decode_nal_pps(s);
1348 if (ret < 0 && !s->is_decoded) {
1349 mpp_err("mpp_hevc_decode_nal_pps error ret = %d", ret);
1350 goto fail;
1351 }
1352 break;
1353 case NAL_SEI_PREFIX:
1354 case NAL_SEI_SUFFIX:
1355 ret = mpp_hevc_decode_nal_sei(s);
1356 if (ret < 0) {
1357 mpp_err("mpp_hevc_decode_nal_sei error ret = %d", ret);
1358 //goto fail;
1359 }
1360 break;
1361 case NAL_TRAIL_R:
1362 case NAL_TRAIL_N:
1363 case NAL_TSA_N:
1364 case NAL_TSA_R:
1365 case NAL_STSA_N:
1366 case NAL_STSA_R:
1367 case NAL_BLA_W_LP:
1368 case NAL_BLA_W_RADL:
1369 case NAL_BLA_N_LP:
1370 case NAL_IDR_W_RADL:
1371 case NAL_IDR_N_LP:
1372 case NAL_CRA_NUT:
1373 case NAL_RADL_N:
1374 case NAL_RADL_R:
1375 case NAL_RASL_N:
1376 case NAL_RASL_R:
1377 if (s->task == NULL) {
1378 s->extra_has_frame = 1;
1379 break;
1380 }
1381 h265d_dbg(H265D_DBG_FUNCTION, "hls_slice_header in");
1382 ret = hls_slice_header(s);
1383 h265d_dbg(H265D_DBG_FUNCTION, "hls_slice_header out");
1384
1385 if (ret < 0) {
1386 mpp_err("hls_slice_header error ret = %d", ret);
1387 /*s->first_nal_type == -1 means first nal is still not parsed.*/
1388 if ((s->first_nal_type != s->nal_unit_type) && (s->first_nal_type != NAL_INIT_VALUE))
1389 return 0;
1390
1391 return ret;
1392 }
1393
1394 if (s->recovery.valid_flag) {
1395 if (!s->recovery.first_frm_valid) {
1396 s->recovery.first_frm_id = s->poc;
1397 s->recovery.first_frm_valid = 1;
1398 s->recovery.recovery_pic_id = s->recovery.first_frm_id + s->recovery.recovery_frame_cnt;
1399 h265d_dbg(H265D_DBG_SEI, "First recovery frame found, poc %d", s->recovery.first_frm_id);
1400 } else {
1401 if (s->recovery.recovery_pic_id < s->poc)
1402 memset(&s->recovery, 0, sizeof(RecoveryPoint));
1403 }
1404 }
1405
1406 if (s->max_ra == INT_MAX) {
1407 if (s->nal_unit_type == NAL_CRA_NUT || IS_BLA(s) ||
1408 (s->recovery.valid_flag && s->recovery.first_frm_valid &&
1409 s->recovery.first_frm_id == s->poc)) {
1410 s->max_ra = s->poc;
1411 } else {
1412 if (IS_IDR(s))
1413 s->max_ra = INT_MIN;
1414 }
1415 }
1416
1417 if ((s->nal_unit_type == NAL_RASL_R || s->nal_unit_type == NAL_RASL_N) &&
1418 s->poc <= s->max_ra) {
1419 s->is_decoded = 0;
1420 break;
1421 } else if (!s->h265dctx->cfg->base.disable_error &&
1422 (s->poc < s->max_ra) && !IS_IRAP(s)) { //when seek to I slice skip the stream small then I slic poc
1423 s->is_decoded = 0;
1424 break;
1425 } else {
1426 if (s->nal_unit_type == NAL_RASL_R && s->poc > s->max_ra)
1427 s->max_ra = INT_MIN;
1428 }
1429
1430 if (s->sh.first_slice_in_pic_flag) {
1431 ret = hevc_frame_start(s);
1432 if (ret < 0) {
1433 mpp_err("hevc_frame_start = %d", ret);
1434 return ret;
1435 }
1436 } else if (!s->ref) {
1437 mpp_err("First slice in a frame missing.\n");
1438 goto fail;
1439 }
1440
1441 if (s->nal_unit_type != s->first_nal_type) {
1442 mpp_err("Non-matching NAL types of the VCL NALUs: %d %d\n",
1443 s->first_nal_type, s->nal_unit_type);
1444 goto fail;
1445 }
1446
1447 if (!s->sh.dependent_slice_segment_flag &&
1448 s->sh.slice_type != I_SLICE) {
1449 // ret = mpp_hevc_slice_rpl(s);
1450 if (ret < 0) {
1451 mpp_err("Error constructing the reference lists for the current slice.\n");
1452 goto fail;
1453 }
1454 // rk_get_ref_info(s);
1455 }
1456
1457
1458 s->is_decoded = 1;
1459
1460 break;
1461 case NAL_EOS_NUT:
1462 case NAL_EOB_NUT:
1463 s->seq_decode = (s->seq_decode + 1) & 0xff;
1464 s->max_ra = INT_MAX;
1465 break;
1466 case NAL_AUD:
1467 case NAL_FD_NUT:
1468 case NAL_UNSPEC62:
1469 break;
1470 default:
1471 mpp_log("Skipping NAL unit %d\n", s->nal_unit_type);
1472 }
1473
1474 return 0;
1475 fail:
1476
1477 return ret;
1478 }
1479
1480
1481 typedef union {
1482 RK_U32 u32;
1483 RK_U16 u16[2];
1484 RK_U8 u8 [4];
1485 float f32;
1486 } mpp_alias32;
1487
1488 #define MPP_FAST_UNALIGNED 1
1489
1490
1491 #ifndef MPP_RN32A
1492 #define MPP_RN32A(p) (((const mpp_alias32*)(p))->u32)
1493 #endif
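
/* Bound the current NAL unit at the next start code and copy it into
 * nal->rbsp_buffer with zero padding. Emulation prevention bytes are left in
 * place here; they are handled later by the bit reader configured with
 * PSEUDO_CODE_H264_H265. Returns the number of bytes consumed from src. */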
RK_S32 mpp_hevc_extract_rbsp(HEVCContext *s, const RK_U8 *src, int length,
                             HEVCNAL *nal)
1496 {
1497 RK_S32 i;
1498
1499 s->skipped_bytes = 0;
1500
1501 #define STARTCODE_TEST \
1502 if (i + 2 < length && src[i + 1] == 0 && src[i + 2] < 2) { \
1503 /* startcode, so we must be past the end */ \
1504 length = i; \
1505 break; \
1506 }
1507
1508 #if MPP_FAST_UNALIGNED
1509 #define FIND_FIRST_ZERO \
1510 if (i > 0 && !src[i]) \
1511 i--; \
1512 while (src[i]) \
1513 i++
1514
1515 for (i = 0; i + 1 < length; i += 5) {
1516 if (!((~MPP_RN32A(src + i) &
1517 (MPP_RN32A(src + i) - 0x01000101U)) &
1518 0x80008080U))
1519 continue;
1520
1521 FIND_FIRST_ZERO;
1522
1523 STARTCODE_TEST;
1524 i -= 3;
1525 }
1526 #else
1527 for (i = 0; i + 1 < length; i += 2) {
1528 if (src[i])
1529 continue;
1530 if (i > 0 && src[i - 1] == 0)
1531 i--;
1532 STARTCODE_TEST;
1533 }
1534 #endif
1535
1536 if (length + MPP_INPUT_BUFFER_PADDING_SIZE > nal->rbsp_buffer_size) {
1537 RK_S32 min_size = length + MPP_INPUT_BUFFER_PADDING_SIZE;
1538 mpp_free(nal->rbsp_buffer);
1539 nal->rbsp_buffer = NULL;
1540 min_size = MPP_MAX(17 * min_size / 16 + 32, min_size);
1541 nal->rbsp_buffer = mpp_malloc(RK_U8, min_size);
1542 if (nal->rbsp_buffer == NULL) {
1543 min_size = 0;
1544 }
1545 nal->rbsp_buffer_size = min_size;
1546 }
1547
1548 memcpy(nal->rbsp_buffer, src, length);
1549 nal->data = nal->rbsp_buffer;
1550 nal->size = length;
1551
1552 memset(nal->rbsp_buffer + length, 0, MPP_INPUT_BUFFER_PADDING_SIZE);
1553 return length;
1554 }
1555
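/* Split an Annex-B stream (or, when s->is_nalff is set, a length-prefixed
 * hvcC stream) into individual NAL units stored in s->nals[], growing the
 * array on demand. */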
static RK_S32 split_nal_units(HEVCContext *s, RK_U8 *buf, RK_U32 length)
1557 {
1558 RK_S32 i, consumed;
1559 MPP_RET ret = MPP_OK;
1560 s->nb_nals = 0;
1561 while (length >= 4) {
1562 HEVCNAL *nal;
1563 RK_S32 extract_length = 0;
1564
1565 if (s->is_nalff) {
1566 for (i = 0; i < s->nal_length_size; i++)
1567 extract_length = (extract_length << 8) | buf[i];
1568 buf += s->nal_length_size;
1569 length -= s->nal_length_size;
1570
1571 if ((RK_U32)extract_length > length) {
1572 mpp_err( "Invalid NAL unit size.\n");
1573 ret = MPP_ERR_STREAM;
1574 goto fail;
1575 }
1576 } else {
1577 /* search start code */
1578 if (buf[2] == 0) {
1579 length--;
1580 buf++;
1581 continue;
1582 }
1583 if (buf[0] != 0 || buf[1] != 0 || buf[2] != 1) {
1584 RK_U32 state = (RK_U32) - 1;
1585 int has_nal = 0;
1586 for (i = 0; i < (RK_S32)length; i++) {
1587 state = (state << 8) | buf[i];
1588 if (((state >> 8) & 0xFFFFFF) == START_CODE) {
1589 has_nal = 1;
1590 i = i - 3;
1591 break;
1592 }
1593 }
1594
1595 if (has_nal) {
1596 length -= i;
1597 buf += i;
1598 continue;
1599 }
1600
1601 if (s->nb_nals) {
1602 return MPP_OK;
1603 } else {
1604 mpp_err( "No start code is found.\n");
1605 ret = MPP_ERR_STREAM;
1606 goto fail;
1607 }
1608 }
1609
1610 buf += 3;
1611 length -= 3;
1612 }
1613
1614 if (!s->is_nalff)
1615 extract_length = length;
1616
1617 if (!extract_length) {
1618 return MPP_OK;
1619 }
1620 if (s->nals_allocated < 1) {
1621 RK_S32 new_size = s->nals_allocated + 10;
1622 HEVCNAL *tmp = mpp_malloc(HEVCNAL, new_size);
1623 memset((void*)tmp, 0, new_size * sizeof(HEVCNAL));
1624 s->nals_allocated = new_size;
1625 s->nals = tmp;
1626 }
1627 if (s->nals_allocated < s->nb_nals + 1) {
1628 int new_size = s->nals_allocated + 10;
1629 HEVCNAL *tmp = mpp_malloc(HEVCNAL, new_size);
            if (!tmp) {
                mpp_err("return enomem new_size %d", new_size);
                ret = MPP_ERR_NOMEM;
                goto fail;
            }
            memset((void*)tmp, 0, new_size * sizeof(HEVCNAL));
1636 memcpy((void*)tmp, (void*)s->nals, (new_size - 10)*sizeof(HEVCNAL));
1637 mpp_free(s->nals);
1638 s->nals = NULL;
1639 s->nals = tmp;
1640 memset(s->nals + s->nals_allocated, 0,
1641 (new_size - s->nals_allocated) * sizeof(*tmp));
1642 s->nals_allocated = new_size;
1643 }
1644 nal = &s->nals[s->nb_nals];
1645
1646 consumed = mpp_hevc_extract_rbsp(s, buf, extract_length, nal);
1647
1648 if (consumed <= 0) {
1649 ret = MPP_ERR_STREAM;
1650 goto fail;
1651 }
1652
1653 s->nb_nals++;
1654
1655 mpp_set_bitread_ctx(&s->HEVClc->gb, (RK_U8 *)nal->data, nal->size);
1656 mpp_set_bitread_pseudo_code_type(&s->HEVClc->gb, PSEUDO_CODE_H264_H265);
1657 if (hls_nal_unit(s) < 0)
1658 s->nb_nals--;
1659
1660 if (s->nal_unit_type < NAL_VPS) {
1661
1662 if (nal->size != consumed)
1663 h265d_dbg(H265D_DBG_GLOBAL, "tag_stream: nal.size=%d, consumed=%d\n", nal->size, consumed);
1664
1665 }
1666
1667 /* if (s->nal_unit_type == NAL_EOB_NUT ||
1668 s->nal_unit_type == NAL_EOS_NUT)
1669 s->eos = 1;*/
1670
1671 buf += consumed;
1672 length -= consumed;
1673 }
1674 fail:
1675
1676 return (s->nb_nals) ? MPP_OK : ret;
1677 }
1678
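/* Store dynamic HDR metadata (e.g. a Dolby Vision RPU) in s->hdr_dynamic_meta,
 * reallocating the holder when the payload grows; DOLBY payloads are stored
 * behind a 00 00 00 01 start code. */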
void mpp_hevc_fill_dynamic_meta(HEVCContext *s, const RK_U8 *data, RK_U32 size, RK_U32 hdr_fmt)
1680 {
1681 MppFrameHdrDynamicMeta *hdr_dynamic_meta = s->hdr_dynamic_meta;
1682
1683 if (hdr_dynamic_meta && (hdr_dynamic_meta->size < size)) {
1684 mpp_free(hdr_dynamic_meta);
1685 hdr_dynamic_meta = NULL;
1686 }
1687
1688 if (!hdr_dynamic_meta) {
1689 hdr_dynamic_meta = mpp_calloc_size(MppFrameHdrDynamicMeta,
1690 sizeof(MppFrameHdrDynamicMeta) + size);
1691 if (!hdr_dynamic_meta) {
1692 mpp_err_f("malloc hdr dynamic data failed!\n");
1693 return;
1694 }
1695 }
1696 if (size && data) {
1697 if (hdr_fmt == DOLBY) {
1698 RK_U8 start_code[4] = {0, 0, 0, 1};
1699
1700 memcpy((RK_U8*)hdr_dynamic_meta->data, start_code, 4);
1701 memcpy((RK_U8*)hdr_dynamic_meta->data + 4, (RK_U8*)data, size - 4);
1702 } else
1703 memcpy((RK_U8*)hdr_dynamic_meta->data, (RK_U8*)data, size);
1704 hdr_dynamic_meta->size = size;
1705 hdr_dynamic_meta->hdr_fmt = hdr_fmt;
1706 }
1707 s->hdr_dynamic_meta = hdr_dynamic_meta;
1708 s->hdr_dynamic = 1;
1709 s->is_hdr = 1;
1710 }
1711
static RK_S32 check_rpus(HEVCContext *s)
1713 {
1714 HEVCNAL *nal;
1715
1716 if (s->nb_nals <= 1)
1717 return 0;
1718
1719 nal = &s->nals[s->nb_nals - 1];
1720
1721 if (nal->size > 2) {
1722 BitReadCtx_t gb;
1723 RK_S32 value, nal_unit_type, nuh_layer_id, temporal_id;
1724
1725 mpp_set_bitread_ctx((&gb), (RK_U8*)nal->data, nal->size);
1726 mpp_set_bitread_pseudo_code_type((&gb), PSEUDO_CODE_H264_H265);
1727
1728 READ_ONEBIT((&gb), &value); /*this bit should be zero*/
1729 READ_BITS((&gb), 6, &nal_unit_type);
1730 READ_BITS((&gb), 6, &nuh_layer_id);
1731 READ_BITS((&gb), 3, &temporal_id);
1732
1733 /*
1734 * Check for RPU delimiter.
1735 *
1736 * Dolby Vision RPUs masquerade as unregistered NALs of type 62.
1737 *
         * We have to do this check here and create the rpu buffer, since RPUs are appended
1739 * to the end of an AU; they are the last non-EOB/EOS NAL in the AU.
1740 */
1741 if (nal_unit_type == NAL_UNSPEC62)
1742 mpp_hevc_fill_dynamic_meta(s, nal->data + 2, gb.bytes_left_ + 4, DOLBY);
1743 }
1744 return 0;
1745 __BITREAD_ERR:
1746 return MPP_ERR_STREAM;
1747 }
1748
static RK_S32 parser_nal_units(HEVCContext *s)
1750 {
1751 /* parse the NAL units */
1752 RK_S32 i, ret = 0, slice_cnt = 0;
1753
1754 check_rpus(s);
1755
1756 for (i = 0; i < s->nb_nals; i++) {
1757 ret = parser_nal_unit(s, s->nals[i].data, s->nals[i].size);
1758 if (ret < 0) {
1759 mpp_err("Error parsing NAL unit #%d,error ret = 0xd.\n", i, ret);
1760 goto fail;
1761 }
1762 /* update slice data if slice_header_extension_present_flag is 1*/
1763 if (s->nal_unit_type < 32) {
1764 switch (s->nal_unit_type) {
1765 case NAL_TRAIL_R:
1766 case NAL_TRAIL_N:
1767 case NAL_TSA_N:
1768 case NAL_TSA_R:
1769 case NAL_STSA_N:
1770 case NAL_STSA_R:
1771 case NAL_BLA_W_LP:
1772 case NAL_BLA_W_RADL:
1773 case NAL_BLA_N_LP:
1774 case NAL_IDR_W_RADL:
1775 case NAL_IDR_N_LP:
1776 case NAL_CRA_NUT:
1777 case NAL_RADL_N:
1778 case NAL_RADL_R:
1779 case NAL_RASL_N:
1780 case NAL_RASL_R:
1781 if (s->pps && s->pps->slice_header_extension_present_flag) {
1782 h265d_dxva2_picture_context_t *temp = (h265d_dxva2_picture_context_t *)s->hal_pic_private;
1783 temp->slice_cut_param[slice_cnt].start_bit = s->start_bit;
1784 temp->slice_cut_param[slice_cnt].end_bit = s->end_bit;
1785 temp->slice_cut_param[slice_cnt].is_enable = 1;
1786 break;
1787 }
1788 default: break;
1789 }
1790 slice_cnt++;
1791 }
1792 }
1793 fail:
1794 return ret;
1795 }
1796
static RK_U16 U16_AT(const RK_U8 *ptr)
1798 {
1799 return ptr[0] << 8 | ptr[1];
1800 }
1801
static RK_S32 hevc_parser_extradata(HEVCContext *s)
1803 {
1804 H265dContext_t *h265dctx = s->h265dctx;
1805 RK_S32 ret = MPP_SUCCESS;
1806 if (h265dctx->extradata_size > 3 &&
1807 (h265dctx->extradata[0] || h265dctx->extradata[1] ||
1808 h265dctx->extradata[2] > 1)) {
1809 /* It seems the extradata is encoded as hvcC format.
1810 * Temporarily, we support configurationVersion==0 until 14496-15 3rd
1811 * is finalized. When finalized, configurationVersion will be 1 and we
1812 * can recognize hvcC by checking if h265dctx->extradata[0]==1 or not. */
1813 const RK_U8 *ptr = (const RK_U8 *)h265dctx->extradata;
1814 RK_U32 size = h265dctx->extradata_size;
1815 RK_U32 numofArrays = 0, numofNals = 0;
1816 RK_U32 j = 0, i = 0;
1817 if (size < 7) {
1818 return MPP_NOK;
1819 }
1820
1821 mpp_log("extradata is encoded as hvcC format");
1822 s->is_nalff = 1;
1823 s->nal_length_size = 1 + (ptr[14 + 7] & 3);
1824 ptr += 22;
1825 size -= 22;
1826 numofArrays = (char)ptr[0];
1827 ptr += 1;
1828 size -= 1;
1829 for (i = 0; i < numofArrays; i++) {
1830 ptr += 1;
1831 size -= 1;
1832 // Num of nals
1833 numofNals = U16_AT(ptr);
1834 ptr += 2;
1835 size -= 2;
1836
1837 for (j = 0; j < numofNals; j++) {
1838 RK_U32 length = 0;
1839 if (size < 2) {
1840 return MPP_NOK;
1841 }
1842
1843 length = U16_AT(ptr);
1844
1845 ptr += 2;
1846 size -= 2;
1847 if (size < length) {
1848 return MPP_NOK;
1849 }
1850 parser_nal_unit(s, ptr, length);
1851 ptr += length;
1852 size -= length;
1853 }
1854 }
1855 } else {
1856 s->is_nalff = 0;
1857 ret = split_nal_units(s, h265dctx->extradata, h265dctx->extradata_size);
1858 if (ret < 0)
1859 return ret;
1860 ret = parser_nal_units(s);
1861 if (ret < 0)
1862 return ret;
1863 }
1864 return ret;
1865 }
1866
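/*
 * Prepare stage of the parser: consume one MppPacket, handle extradata
 * (hvcC or Annex-B), optionally run the frame splitter, and split the
 * resulting frame into NAL units so that h265d_parse() can build the task.
 */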
MPP_RET h265d_prepare(void *ctx, MppPacket pkt, HalDecTask *task)
1868 {
1869
1870 MPP_RET ret = MPP_OK;
1871 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1872 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
1873 SplitContext_t *sc = (SplitContext_t*)h265dctx->split_cxt;
1874 RK_S64 pts = -1, dts = -1;
1875 RK_U8 *buf = NULL;
1876 void *pos = NULL;
1877 RK_S32 length = 0;
1878
1879 task->valid = 0;
1880 s->eos = mpp_packet_get_eos(pkt);
1881
1882 if (sc != NULL) {
1883 sc->eos = s->eos;
1884 } else if (h265dctx->cfg->base.split_parse) {
1885 h265d_split_init((void**)&sc);
1886 if (sc == NULL) {
1887 mpp_err("split contxt malloc fail");
1888 return MPP_ERR_NOMEM;
1889 }
1890 h265dctx->split_cxt = sc;
1891 }
1892
1893 buf = (RK_U8 *)mpp_packet_get_pos(pkt);
1894 pts = mpp_packet_get_pts(pkt);
1895 dts = mpp_packet_get_dts(pkt);
1896 h265d_dbg(H265D_DBG_TIME, "prepare get pts %lld", pts);
1897 length = (RK_S32)mpp_packet_get_length(pkt);
1898
1899 if (mpp_packet_get_flag(pkt) & MPP_PACKET_FLAG_EXTRA_DATA) {
1900
1901 h265dctx->extradata_size = length;
1902 h265dctx->extradata = buf;
1903 s->extra_has_frame = 0;
1904 s->task = NULL;
1905 hevc_parser_extradata(s);
1906 if (!s->extra_has_frame) {
1907 pos = buf + length;
1908 mpp_packet_set_pos(pkt, pos);
1909 return MPP_OK;
1910 }
1911 }
1912
1913 if (h265dctx->cfg->base.split_parse && !s->is_nalff) {
1914 RK_S32 consume = 0;
1915 RK_U8 *split_out_buf = NULL;
1916 RK_S32 split_size = 0;
1917
1918 consume = h265d_split_frame(h265dctx->split_cxt, (const RK_U8**)&split_out_buf, &split_size,
1919 (const RK_U8*)buf, length, pts, dts);
1920 pos = buf + consume;
1921 mpp_packet_set_pos(pkt, pos);
1922 if (split_size) {
1923 buf = split_out_buf;
1924 length = split_size;
1925 s->checksum_buf = buf; // keep the frame data for checksum comparison with openhevc
1926 s->checksum_buf_size = split_size;
1927 h265d_dbg(H265D_DBG_TIME, "split frame get pts %lld", sc->pts);
1928 s->pts = sc->pts;
1929 s->eos = (s->eos && (mpp_packet_get_length(pkt) < 4)) ? 1 : 0;
1930 } else {
1931 return MPP_FAIL_SPLIT_FRAME;
1932 }
1933 } else {
1934 pos = buf + length;
1935 s->pts = pts;
1936 mpp_packet_set_pos(pkt, pos);
1937 if (s->eos && !length) {
1938 task->valid = 0;
1939 task->flags.eos = 1;
1940 h265d_flush(ctx);
1941 return ret;
1942 }
1943 }
1944 #ifdef dump
1945 if (s->nb_frame < 10 && fp != NULL) {
1946 fwrite(buf, 1, length, fp);
1947 }
1948 #endif
1949 ret = (MPP_RET)split_nal_units(s, buf, length);
1950
1951 if (MPP_OK == ret) {
1952 if (MPP_OK == h265d_syntax_fill_slice(s->h265dctx, task->input)) {
1953 task->valid = 1;
1954 task->input_packet = s->input_packet;
1955 }
1956 }
1957 return ret;
1958
1959 }
1960
1961 MPP_RET h265d_get_stream(void *ctx, RK_U8 **buf, RK_S32 *size)
1962 {
1963 MPP_RET ret = MPP_OK;
1964 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1965 HEVCContext *s = h265dctx->priv_data;
1966 *buf = s->checksum_buf;
1967 *size = s->checksum_buf_size;
1968 return ret;
1969 }
1970
1971 MPP_RET h265d_set_compare_info(void *ctx, void *info)
1972 {
1973 MPP_RET ret = MPP_OK;
1974 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1975 h265dctx->compare_info = info;
1976 return ret;
1977 }
1978
1979
1980 MPP_RET h265d_parse(void *ctx, HalDecTask *task)
1981 {
1982 MPP_RET ret;
1983 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1984 HEVCContext *s = h265dctx->priv_data;
1985
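    /* Parse all NALs split out in h265d_prepare(). A stream error is demoted to
     * a parse_err flag so the task can still be released; a valid task is only
     * produced when a new reference picture (s->ref) was set up. */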
1986 task->valid = 0;
1987 s->got_frame = 0;
1988 s->task = task;
1989 s->ref = NULL;
1990 ret = parser_nal_units(s);
1991 if (ret < 0) {
1992 if (ret == MPP_ERR_STREAM) {
1993 mpp_log("current stream is no right skip it %p\n", s->ref);
1994 ret = 0;
1995 }
1996 // return ret;
1997 task->flags.parse_err = 1;
1998 }
1999 h265d_dbg(H265D_DBG_GLOBAL, "decode poc = %d", s->poc);
2000 if (s->ref) {
2001 if (!task->flags.parse_err)
2002 h265d_parser2_syntax(h265dctx);
2003
2004 s->task->syntax.data = s->hal_pic_private;
2005 s->task->syntax.number = 1;
2006 s->task->valid = 1;
2007 }
2008 if (s->eos) {
2009 h265d_flush(ctx);
2010 s->task->flags.eos = 1;
2011 }
2012 s->nb_frame++;
2013 if (s->is_decoded) {
2014 h265d_dbg(H265D_DBG_GLOBAL, "Decoded frame with POC %d.\n", s->poc);
2015 s->is_decoded = 0;
2016 }
2017 mpp_hevc_output_frame(ctx, 0);
2018 return MPP_OK;
2019 }
2020
2021 MPP_RET h265d_deinit(void *ctx)
2022 {
2023 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2024 HEVCContext *s = h265dctx->priv_data;
2025 SplitContext_t *sc = h265dctx->split_cxt;
2026 RK_U8 *buf = NULL;
2027 int i;
2028
2029 for (i = 0; i < MAX_DPB_SIZE; i++) {
2030 mpp_hevc_unref_frame(s, &s->DPB[i], ~0);
2031 mpp_frame_deinit(&s->DPB[i].frame);
2032 }
2033
2034 for (i = 0; i < MAX_VPS_COUNT; i++)
2035 mpp_free(s->vps_list[i]);
2036 for (i = 0; i < MAX_SPS_COUNT; i++) {
2037 if (s->sps_list[i])
2038 mpp_mem_pool_put(s->sps_pool, s->sps_list[i]);
2039 }
2040 for (i = 0; i < MAX_PPS_COUNT; i++)
2041 mpp_hevc_pps_free(s->pps_list[i]);
2042
2043 mpp_free(s->HEVClc);
2044
2045 s->HEVClc = NULL;
2046
2047 for (i = 0; i < s->nals_allocated; i++)
2048 mpp_free(s->nals[i].rbsp_buffer);
2049
2050 if (s->nals) {
2051 mpp_free(s->nals);
2052 }
2053
2054 MPP_FREE(s->pre_pps_data);
2055
2056 s->nals_allocated = 0;
2057
2058 if (s->hal_pic_private) {
2059 h265d_dxva2_picture_context_t *ctx_pic = (h265d_dxva2_picture_context_t *)s->hal_pic_private;
2060 MPP_FREE(ctx_pic->slice_short);
2061 MPP_FREE(ctx_pic->slice_cut_param);
2062 mpp_free(s->hal_pic_private);
2063 }
2064 if (s->input_packet) {
2065 buf = mpp_packet_get_data(s->input_packet);
2066 mpp_free(buf);
2067 mpp_packet_deinit(&s->input_packet);
2068 }
2069
2070 if (s->sps_pool)
2071 mpp_mem_pool_deinit(s->sps_pool);
2072
2073 MPP_FREE(s->hdr_dynamic_meta);
2074
2075 if (s) {
2076 mpp_free(s);
2077 }
2078
2079 if (sc) {
2080 h265d_split_deinit(sc);
2081 }
2082 return 0;
2083 }
2084
2085 static RK_S32 hevc_init_context(H265dContext_t *h265dctx)
2086 {
2087 HEVCContext *s = h265dctx->priv_data;
2088 RK_U32 i;
2089
2090 s->h265dctx = h265dctx;
2091
2092 s->HEVClc = (HEVCLocalContext*)mpp_calloc(HEVCLocalContext, 1);
2093 if (!s->HEVClc)
2094 goto fail;
2095
2096 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
2097 s->DPB[i].slot_index = 0xff;
2098 s->DPB[i].poc = INT_MAX;
2099 s->DPB[i].error_flag = 0;
2100 mpp_frame_init(&s->DPB[i].frame);
2101 if (!s->DPB[i].frame)
2102 goto fail;
2103 }
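    /* DPB entries start detached: no buffer slot attached yet (slot_index 0xff)
     * and poc preset to INT_MAX as a sentinel before first use. */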
2104
2105 s->max_ra = INT_MAX;
2106
2107
2108 s->temporal_layer_id = 8;
2109 s->context_initialized = 1;
2110
2111 return 0;
2112
2113 fail:
2114 h265d_deinit(h265dctx);
2115 return MPP_ERR_NOMEM;
2116 }
2117
2118
2119 MPP_RET h265d_init(void *ctx, ParserCfg *parser_cfg)
2120 {
2121
2122 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2123 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
2124 SplitContext_t *sc = (SplitContext_t*)h265dctx->split_cxt;
2125 RK_S32 ret;
2126 RK_U8 *buf = NULL;
2127 RK_S32 size = SZ_512K;
2128 if (s == NULL) {
2129 s = (HEVCContext*)mpp_calloc(HEVCContext, 1);
2130 if (s == NULL) {
2131 mpp_err("hevc contxt malloc fail");
2132 return MPP_ERR_NOMEM;
2133 }
2134 h265dctx->priv_data = s;
2135 }
2136
2137 s->first_nal_type = NAL_INIT_VALUE;
2138 h265dctx->cfg = parser_cfg->cfg;
2139
2140 if (sc == NULL && h265dctx->cfg->base.split_parse) {
2141 h265d_split_init((void**)&sc);
2142 if (sc == NULL) {
2143 mpp_err("split contxt malloc fail");
2144 return MPP_ERR_NOMEM;
2145 }
2146 h265dctx->split_cxt = sc;
2147 }
2148
2149 // mpp_env_set_u32("h265d_debug", H265D_DBG_REF);
2150 mpp_env_get_u32("h265d_debug", &h265d_debug, 0);
2151
2152 ret = hevc_init_context(h265dctx);
2153
2154 s->hal_pic_private = mpp_calloc_size(void, sizeof(h265d_dxva2_picture_context_t));
2155
2156 if (s->hal_pic_private) {
2157 h265d_dxva2_picture_context_t *ctx_pic = (h265d_dxva2_picture_context_t *)s->hal_pic_private;
2158 ctx_pic->slice_short = (DXVA_Slice_HEVC_Short *)mpp_malloc(DXVA_Slice_HEVC_Short, MAX_SLICES);
2159
2160 if (!ctx_pic->slice_short)
2161 return MPP_ERR_NOMEM;
2162
2163 ctx_pic->slice_cut_param = (DXVA_Slice_HEVC_Cut_Param *)mpp_malloc(DXVA_Slice_HEVC_Cut_Param, MAX_SLICES);
2164 if (!ctx_pic->slice_cut_param)
2165 return MPP_ERR_NOMEM;
2166 ctx_pic->max_slice_num = MAX_SLICES;
2167 } else {
2168 return MPP_ERR_NOMEM;
2169 }
2170
2171 if (ret < 0)
2172 return ret;
2173
2174 s->picture_struct = 0;
2175
2176 s->slots = parser_cfg->frame_slots;
2177
2178 s->packet_slots = parser_cfg->packet_slots;
2179
2180 if (h265dctx->extradata_size > 0 && h265dctx->extradata) {
2181 ret = hevc_parser_extradata(s);
2182 if (ret < 0) {
2183 h265d_deinit(h265dctx);
2184 return ret;
2185 }
2186 }
2187
2188 buf = mpp_malloc(RK_U8, size);
2189
2190 if (buf == NULL) {
2191 return MPP_ERR_NOMEM;
2192 }
2193
2194 if (MPP_OK != mpp_packet_init(&s->input_packet, (void*)buf, size)) {
2195 return MPP_ERR_NOMEM;
2196 }
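    /* s->input_packet wraps a 512 KB scratch buffer that is exposed to the HAL
     * as task->input_packet in h265d_prepare(). */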
2197 mpp_buf_slot_setup(s->slots, 25);
2198
2199 s->h265dctx->hw_info = parser_cfg->hw_info;
2200
2201 s->pre_pps_id = -1;
2202
2203 s->sps_pool = mpp_mem_pool_init(sizeof(HEVCSPS));
2204
2205 #ifdef dump
2206 fp = fopen("/data/dump1.bin", "wb+");
2207 #endif
2208 return 0;
2209 }
2210
2211 MPP_RET h265d_flush(void *ctx)
2212 {
2213 RK_S32 ret = 0;
2214 do {
2215 ret = mpp_hevc_output_frame(ctx, 1);
2216 } while (ret);
2217 return MPP_OK;
2218 }
2219
2220 MPP_RET h265d_reset(void *ctx)
2221 {
2222 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2223 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
2224 RK_S32 ret = 0;
2225 do {
2226 ret = mpp_hevc_output_frame(ctx, 1);
2227 } while (ret);
2228 mpp_hevc_flush_dpb(s);
2229 h265d_split_reset(h265dctx->split_cxt);
2230 s->max_ra = INT_MAX;
2231 s->eos = 0;
2232 s->first_i_fast_play = 0;
2233 return MPP_OK;
2234 }
2235
2236 MPP_RET h265d_control(void *ctx, MpiCmd cmd, void *param)
2237 {
2238 (void) ctx;
2239 (void) cmd;
2240 (void) param;
2241 return MPP_OK;
2242 }
2243
2244 MPP_RET h265d_callback(void *ctx, void *err_info)
2245 {
2246 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2247 HalDecTask *task_dec = (HalDecTask *)err_info;
2248 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
2249
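    /* Hardware decode-error callback: unless error handling is disabled, mark
     * the output frame and the matching DPB entry so later pictures that
     * reference it can see the error. */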
2250 if (!h265dctx->cfg->base.disable_error) {
2251 MppFrame frame = NULL;
2252 RK_U32 i = 0;
2253
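        /* NAL types 16..23 are IRAP pictures (BLA/IDR/CRA); an error on one
         * resets max_ra to its initial value so decoding re-synchronizes at the
         * next random access point. */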
2254 if (s->first_nal_type >= 16 && s->first_nal_type <= 23) {
2255 mpp_log("IS_IRAP frame found error");
2256 s->max_ra = INT_MAX;
2257 }
2258 // s->miss_ref_flag = 1;
2259 mpp_buf_slot_get_prop(s->slots, task_dec->output, SLOT_FRAME_PTR, &frame);
2260 mpp_frame_set_errinfo(frame, MPP_FRAME_ERR_UNKNOW);
2261 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
2262 if (s->DPB[i].slot_index == task_dec->output) {
2263 s->DPB[i].error_flag = 1;
2264 }
2265 }
2266 }
2267
2268 if (!task_dec->flags.parse_err)
2269 s->ps_need_upate = 0;
2270
2271 (void) err_info;
2272
2273 return MPP_OK;
2274 }
2275
2276 const ParserApi api_h265d_parser = {
2277 .name = "h265d_parse",
2278 .coding = MPP_VIDEO_CodingHEVC,
2279 .ctx_size = sizeof(H265dContext_t),
2280 .flag = 0,
2281 .init = h265d_init,
2282 .deinit = h265d_deinit,
2283 .prepare = h265d_prepare,
2284 .parse = h265d_parse,
2285 .reset = h265d_reset,
2286 .flush = h265d_flush,
2287 .control = h265d_control,
2288 .callback = h265d_callback,
2289 };
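/*
 * Typical call order through this ParserApi (a rough sketch, not enforced by
 * this file): h265d_init -> repeated h265d_prepare/h265d_parse per packet, with
 * h265d_callback on hardware errors -> h265d_flush/h265d_reset as needed ->
 * h265d_deinit.
 */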
2290
2291