1 /*
2 *
3 * Copyright 2015 Rockchip Electronics Co. LTD
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 * http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18 /*
19 * @file h265d_parser.c
20 * @brief
21 * @author csy(csy@rock-chips.com)
22
23 * @version 1.0.0
24 * @history
25 * 2015.7.15 : Create
26 */
27
28 #define MODULE_TAG "H265D_PARSER"
29
30 #include "mpp_env.h"
31 #include "mpp_mem.h"
32 #include "mpp_bitread.h"
33 #include "mpp_packet_impl.h"
34 #include "rk_hdr_meta_com.h"
35
36 #include "h265d_parser.h"
37 #include "h265d_syntax.h"
38 #include "h265d_api.h"
39 #include "h2645d_sei.h"
40
41 #define START_CODE 0x000001 ///< start_code_prefix_one_3bytes
42
43 RK_U32 h265d_debug;
44 #ifdef dump
45 FILE *fp = NULL;
46 #endif
47 //static RK_U32 start_write = 0, value = 0;
48
49 // for hevc max ctu size 64
static RK_U32 rkv_ctu_64_align(RK_U32 val)
51 {
52 return MPP_ALIGN(val, 64);
53 }
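/*
 * Illustrative example for the helper above: with a 64-pixel CTU a
 * 1080-line picture is padded to rkv_ctu_64_align(1080) == 1088, while
 * 1920 is already a multiple of 64 and is returned unchanged.
 */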
54
55 /**
56 * Find the end of the current frame in the bitstream.
57 * @return the position of the first byte of the next frame, or END_NOT_FOUND
58 */
static RK_S32 hevc_find_frame_end(SplitContext_t *sc, const RK_U8 *buf,
60 int buf_size)
61 {
62 RK_S32 i;
63
64 for (i = 0; i < buf_size; i++) {
65 int nut, layer_id;
66
67 sc->state64 = (sc->state64 << 8) | buf[i];
68
69 if (((sc->state64 >> 3 * 8) & 0xFFFFFF) != START_CODE)
70 continue;
71 nut = (sc->state64 >> (2 * 8 + 1)) & 0x3F;
72 layer_id = (((sc->state64 >> 2 * 8) & 0x01) << 5) + (((sc->state64 >> 1 * 8) & 0xF8) >> 3);
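/*
 * state64 is a sliding window over the most recent input bytes. Once the
 * 00 00 01 prefix occupies window bytes 5..3, bytes 2..1 hold the two-byte
 * nal_unit_header() (forbidden_zero_bit, 6-bit nal_unit_type, 6-bit
 * nuh_layer_id, 3-bit nuh_temporal_id_plus1) and buf[i] is the first
 * payload byte, so "return i - 5" below points at the first byte of the
 * start code that begins the next frame.
 */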
73 //mpp_log("nut = %d layer_id = %d\n",nut,layer_id);
74 // Beginning of access unit
75 if ((nut >= NAL_VPS && nut <= NAL_AUD) || nut == NAL_SEI_PREFIX ||
76 (nut >= 41 && nut <= 44) || (nut >= 48 && nut <= 55)) {
77 if (sc->frame_start_found && !layer_id) {
78 sc->frame_start_found = 0;
79 return i - 5;
80 }
81 } else if (nut <= NAL_RASL_R ||
82 (nut >= NAL_BLA_W_LP && nut <= NAL_CRA_NUT)) {
83 int first_slice_segment_in_pic_flag = buf[i] >> 7;
84 //mpp_log("nut = %d first_slice_segment_in_pic_flag %d layer_id = %d \n",nut,
85 // first_slice_segment_in_pic_flag,
86 // layer_id);
87 if (first_slice_segment_in_pic_flag && !layer_id) {
88 if (!sc->frame_start_found) {
89 sc->frame_start_found = 1;
90 } else { // First slice of next frame found
91 sc->frame_start_found = 0;
92 return i - 5;
93 }
94 }
95 }
96 }
97 return END_NOT_FOUND;
98 }
99
static RK_S32 mpp_combine_frame(SplitContext_t *sc, RK_S32 next, const RK_U8 **buf, RK_S32 *buf_size)
101 {
102 if (sc->overread) {
103 mpp_log("overread %d, state:%X next:%d index:%d o_index:%d\n",
104 sc->overread, sc->state, next, sc->index, sc->overread_index);
105 mpp_log("%X %X %X %X\n", (*buf)[0], (*buf)[1], (*buf)[2], (*buf)[3]);
106 }
107
108 /* Copy overread bytes from last frame into buffer. */
109 for (; sc->overread > 0; sc->overread--) {
110 sc->buffer[sc->index++] = sc->buffer[sc->overread_index++];
111 }
112
113 /* flush remaining if EOF */
114 if (!*buf_size && next == END_NOT_FOUND) {
115 next = 0;
116 }
117
118 sc->last_index = sc->index;
119
/* copy into buffer and return */
121 if (next == END_NOT_FOUND) {
122 RK_U32 min_size = (*buf_size) + sc->index + MPP_INPUT_BUFFER_PADDING_SIZE;
123 void* new_buffer;
124 if (min_size > sc->buffer_size) {
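/* Grow by roughly 1/16 plus a small constant so that repeated,
 * slightly larger requests do not trigger a realloc every time. */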
125 min_size = MPP_MAX(17 * min_size / 16 + 32, min_size);
126 new_buffer = mpp_realloc(sc->buffer, RK_U8, min_size);
127 if (!new_buffer) {
128 sc->buffer_size = 0;
129 return MPP_ERR_NOMEM;
130 }
131 sc->buffer_size = min_size;
132 sc->buffer = new_buffer;
133 }
134
135 memcpy(&sc->buffer[sc->index], *buf, *buf_size);
136 sc->index += *buf_size;
137
138 return -1;
139 }
140
141 *buf_size =
142 sc->overread_index = sc->index + next;
143
144 /* append to buffer */
145 if (sc->index) {
146 RK_U32 min_size = next + sc->index + MPP_INPUT_BUFFER_PADDING_SIZE;
147 void* new_buffer;
148 if (min_size > sc->buffer_size) {
149 min_size = MPP_MAX(17 * min_size / 16 + 32, min_size);
150 new_buffer = mpp_realloc(sc->buffer, RK_U8, min_size);
151 if (!new_buffer) {
152 sc->buffer_size = 0;
153 return MPP_ERR_NOMEM;
154 }
155 sc->buffer_size = min_size;
156 sc->buffer = new_buffer;
157 }
158
159 if (next > -MPP_INPUT_BUFFER_PADDING_SIZE)
160 memcpy(&sc->buffer[sc->index], *buf,
161 next + MPP_INPUT_BUFFER_PADDING_SIZE);
162 sc->index = 0;
163 *buf = sc->buffer;
164 }
165
166 /* store overread bytes */
167 for (; next < 0; next++) {
168 sc->state = (sc->state << 8) | sc->buffer[sc->last_index + next];
169 sc->state64 = (sc->state64 << 8) | sc->buffer[sc->last_index + next];
170 sc->overread++;
171 }
172
173 if (sc->overread) {
174 mpp_log("overread %d, state:%X next:%d index:%d o_index:%d\n",
175 sc->overread, sc->state, next, sc->index, sc->overread_index);
176 mpp_log("%X %X %X %X\n", (*buf)[0], (*buf)[1], (*buf)[2], (*buf)[3]);
177 }
178
179 return 0;
180 }
181
static RK_S32 h265d_split_init(void **sc)
183 {
SplitContext_t *s = mpp_calloc(SplitContext_t, 1);
if (s == NULL) {
mpp_err("split alloc context fail");
return MPP_ERR_NOMEM;
}
s->buffer = mpp_malloc(RK_U8, MAX_FRAME_SIZE);
if (s->buffer == NULL) {
mpp_free(s);
return MPP_ERR_NOMEM;
}
s->buffer_size = MAX_FRAME_SIZE;
s->fetch_timestamp = 1;
*sc = s;
197 return MPP_OK;
198 }
199
static void mpp_fetch_timestamp(SplitContext_t *s, RK_S32 off)
201 {
202 RK_S32 i;
203
204 s->dts = s->pts = -1;
205 s->offset = 0;
206 for (i = 0; i < MPP_PARSER_PTS_NB; i++) {
207 h265d_dbg(H265D_DBG_TIME, "s->cur_offset %lld s->cur_frame_offset[%d] %lld s->frame_offset %lld s->next_frame_offset %lld",
208 s->cur_offset, i, s->cur_frame_offset[i], s->frame_offset, s->next_frame_offset);
209 if ( s->cur_offset + off >= s->cur_frame_offset[i]
210 && (s->frame_offset < s->cur_frame_offset[i] ||
211 (!s->frame_offset && !s->next_frame_offset)) // first field/frame
212 // check disabled since MPEG-TS does not send complete PES packets
213 && /*s->next_frame_offset + off <*/ s->cur_frame_end[i]) {
214 s->dts = s->cur_frame_dts[i];
215 s->pts = s->cur_frame_pts[i];
216 s->offset = s->next_frame_offset - s->cur_frame_offset[i];
217 if (s->cur_offset + off < s->cur_frame_end[i])
218 break;
219 }
220 }
221 }
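/*
 * Usage sketch for the splitter (illustrative only; the variable names here
 * are hypothetical, not part of the MPP API):
 *
 *   const RK_U8 *frame = NULL;
 *   RK_S32 frame_size = 0;
 *   RK_S32 used = h265d_split_frame(sc, &frame, &frame_size,
 *                                   pkt_data, pkt_size, pts, dts);
 *
 * 'used' bytes of the input are consumed; frame_size stays 0 until a whole
 * access unit has been accumulated, after which frame/frame_size describe
 * one complete frame (see h265d_prepare() below for the real call site).
 */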
static RK_S32 h265d_split_frame(void *sc,
223 const RK_U8 **poutbuf, RK_S32 *poutbuf_size,
224 const RK_U8 *buf, RK_S32 buf_size, RK_S64 pts,
225 RK_S64 dts)
226 {
227 RK_S32 next, i;
228
229 SplitContext_t *s = (SplitContext_t*)sc;
230
231 if (s->cur_offset + buf_size !=
232 s->cur_frame_end[s->cur_frame_start_index]) { /* skip remainder packets */
233 /* add a new packet descriptor */
234 i = (s->cur_frame_start_index + 1) & (MPP_PARSER_PTS_NB - 1);
235 s->cur_frame_start_index = i;
236 s->cur_frame_offset[i] = s->cur_offset;
237 s->cur_frame_end[i] = s->cur_offset + buf_size;
238 s->cur_frame_pts[i] = pts;
239 s->cur_frame_dts[i] = dts;
240 h265d_dbg(H265D_DBG_TIME, "s->cur_frame_start_index = %d,cur_frame_offset = %lld,s->cur_frame_end = %lld pts = %lld",
241 s->cur_frame_start_index, s->cur_frame_offset[i], s->cur_frame_end[i], pts);
242 }
243
244 if (s->fetch_timestamp) {
245 s->fetch_timestamp = 0;
246 s->last_pts = s->pts;
247 s->last_dts = s->dts;
248 mpp_fetch_timestamp(s, 0);
249 }
250
251 if (s->eos && !buf_size) {
252 *poutbuf = s->buffer;
253 *poutbuf_size = s->index;
254 return 0;
255 }
256
257 next = hevc_find_frame_end(s, buf, buf_size);
258 if (s->eos && buf_size && next == END_NOT_FOUND) {
259 next = buf_size;
260 }
261
262 if (mpp_combine_frame(s, next, &buf, &buf_size) < 0) {
263 *poutbuf = NULL;
264 *poutbuf_size = 0;
265 s->cur_offset += buf_size;
266 return buf_size;
267 }
268
269 *poutbuf = buf;
270 *poutbuf_size = buf_size;
271
272 if (next < 0)
273 next = 0;
274
275 if (*poutbuf_size) {
276 /* fill the data for the current frame */
277 s->frame_offset = s->next_frame_offset;
278
279 /* offset of the next frame */
280 s->next_frame_offset = s->cur_offset + next;
281 s->fetch_timestamp = 1;
282 }
283
284 s->cur_offset += next;
285 return next;
286 }
287
static RK_S32 h265d_split_reset(void *sc)
289 {
290 RK_U8 *buf = NULL;
291 RK_U32 size = 0;
292 SplitContext_t *s = (SplitContext_t*)sc;
293 if (sc == NULL) {
294 return MPP_OK;
295 }
296 buf = s->buffer;
297 size = s->buffer_size;
298 memset(s, 0, sizeof(SplitContext_t));
299 s->fetch_timestamp = 1;
300 s->buffer = buf;
301 s->buffer_size = size;
302 s->eos = 0;
303 return MPP_OK;
304 }
305
306
static RK_S32 h265d_split_deinit(void *sc)
308 {
SplitContext_t *s = (SplitContext_t *)sc;
if (s == NULL)
return MPP_OK;
if (s->buffer) {
mpp_free(s->buffer);
s->buffer = NULL;
}
mpp_free(s);
318 return MPP_OK;
319 }
320
static RK_S32 pred_weight_table(HEVCContext *s, BitReadCtx_t *gb)
322 {
323 RK_U32 i = 0;
324 RK_U32 j = 0;
325 RK_U8 luma_weight_l0_flag[16];
326 RK_U8 chroma_weight_l0_flag[16];
327 RK_U8 luma_weight_l1_flag[16];
328 RK_U8 chroma_weight_l1_flag[16];
329
330 READ_UE(gb, &s->sh.luma_log2_weight_denom);
331 if (s->sps->chroma_format_idc != 0) {
332 RK_S32 delta = 0;
333 READ_SE(gb, &delta);
334 s->sh.chroma_log2_weight_denom = mpp_clip(s->sh.luma_log2_weight_denom + delta, 0, 7);
335 }
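/*
 * Explicit weighted prediction: each final weight is the default value
 * (1 << log2_weight_denom) plus a signed delta from the bitstream, e.g.
 * luma_weight_l0[i] = (1 << luma_log2_weight_denom) + delta_luma_weight_l0.
 * When the per-reference flag is 0, the default weight and a zero offset
 * are used instead.
 */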
336
337 for (i = 0; i < s->sh.nb_refs[L0]; i++) {
338 READ_ONEBIT(gb, &luma_weight_l0_flag[i]);
339 if (!luma_weight_l0_flag[i]) {
340 s->sh.luma_weight_l0[i] = 1 << s->sh.luma_log2_weight_denom;
341 s->sh.luma_offset_l0[i] = 0;
342 }
343 }
344
345 if (s->sps->chroma_format_idc != 0) { // FIXME: invert "if" and "for"
346 for (i = 0; i < s->sh.nb_refs[L0]; i++) {
347 READ_ONEBIT(gb, &chroma_weight_l0_flag[i]);
348 }
349 } else {
350 for (i = 0; i < s->sh.nb_refs[L0]; i++)
351 chroma_weight_l0_flag[i] = 0;
352 }
353
354 for (i = 0; i < s->sh.nb_refs[L0]; i++) {
355 if (luma_weight_l0_flag[i]) {
356 RK_S32 delta_luma_weight_l0 = 0;
357 READ_SE(gb, &delta_luma_weight_l0);
358 s->sh.luma_weight_l0[i] = (1 << s->sh.luma_log2_weight_denom) + delta_luma_weight_l0;
359 READ_SE(gb, &s->sh.luma_offset_l0[i]);
360 }
361 if (chroma_weight_l0_flag[i]) {
362 for (j = 0; j < 2; j++) {
363 RK_S32 delta_chroma_weight_l0 = 0;
364 RK_S32 delta_chroma_offset_l0 = 0;
365 READ_SE(gb, &delta_chroma_weight_l0);
366 READ_SE(gb, &delta_chroma_offset_l0);
367 s->sh.chroma_weight_l0[i][j] = (1 << s->sh.chroma_log2_weight_denom) + delta_chroma_weight_l0;
368 s->sh.chroma_offset_l0[i][j] = mpp_clip((delta_chroma_offset_l0 - ((128 * s->sh.chroma_weight_l0[i][j])
369 >> s->sh.chroma_log2_weight_denom) + 128), -128, 127);
370 }
371 } else {
372 s->sh.chroma_weight_l0[i][0] = 1 << s->sh.chroma_log2_weight_denom;
373 s->sh.chroma_offset_l0[i][0] = 0;
374 s->sh.chroma_weight_l0[i][1] = 1 << s->sh.chroma_log2_weight_denom;
375 s->sh.chroma_offset_l0[i][1] = 0;
376 }
377 }
378
379 if (s->sh.slice_type == B_SLICE) {
380 for (i = 0; i < s->sh.nb_refs[L1]; i++) {
381 READ_ONEBIT(gb, &luma_weight_l1_flag[i]);
382 if (!luma_weight_l1_flag[i]) {
383 s->sh.luma_weight_l1[i] = 1 << s->sh.luma_log2_weight_denom;
384 s->sh.luma_offset_l1[i] = 0;
385 }
386 }
387 if (s->sps->chroma_format_idc != 0) {
388 for (i = 0; i < s->sh.nb_refs[L1]; i++)
389 READ_ONEBIT(gb, &chroma_weight_l1_flag[i]);
390 } else {
391 for (i = 0; i < s->sh.nb_refs[L1]; i++)
392 chroma_weight_l1_flag[i] = 0;
393 }
394 for (i = 0; i < s->sh.nb_refs[L1]; i++) {
395 if (luma_weight_l1_flag[i]) {
396 RK_S32 delta_luma_weight_l1 = 0;
READ_SE(gb, &delta_luma_weight_l1);
398 s->sh.luma_weight_l1[i] = (1 << s->sh.luma_log2_weight_denom) + delta_luma_weight_l1;
399 READ_SE(gb, &s->sh.luma_offset_l1[i]);
400 }
401 if (chroma_weight_l1_flag[i]) {
402 for (j = 0; j < 2; j++) {
403 RK_S32 delta_chroma_weight_l1 = 0;
404 RK_S32 delta_chroma_offset_l1 = 0;
405 READ_SE(gb, &delta_chroma_weight_l1);
406 READ_SE(gb, &delta_chroma_offset_l1);
407 s->sh.chroma_weight_l1[i][j] = (1 << s->sh.chroma_log2_weight_denom) + delta_chroma_weight_l1;
408 s->sh.chroma_offset_l1[i][j] = mpp_clip((delta_chroma_offset_l1 - ((128 * s->sh.chroma_weight_l1[i][j])
409 >> s->sh.chroma_log2_weight_denom) + 128), -128, 127);
410 }
411 } else {
412 s->sh.chroma_weight_l1[i][0] = 1 << s->sh.chroma_log2_weight_denom;
413 s->sh.chroma_offset_l1[i][0] = 0;
414 s->sh.chroma_weight_l1[i][1] = 1 << s->sh.chroma_log2_weight_denom;
415 s->sh.chroma_offset_l1[i][1] = 0;
416 }
417 }
418 }
419 return 0;
420 __BITREAD_ERR:
421 return MPP_ERR_STREAM;
422 }
423
static RK_S32 decode_lt_rps(HEVCContext *s, LongTermRPS *rps, BitReadCtx_t *gb)
425 {
426 const HEVCSPS *sps = s->sps;
427 RK_S32 max_poc_lsb = 1 << sps->log2_max_poc_lsb;
428 RK_S32 prev_delta_msb = 0;
429 RK_U32 nb_sps = 0, nb_sh;
430 RK_S32 i;
431
432 RK_S32 bit_begin = gb->used_bits;
433 s->rps_bit_offset[s->slice_idx] =
434 s->rps_bit_offset_st[s->slice_idx];
435
436 rps->nb_refs = 0;
437 if (!sps->long_term_ref_pics_present_flag)
438 return 0;
439
440 if (sps->num_long_term_ref_pics_sps > 0)
441 READ_UE(gb, &nb_sps);
442
443 READ_UE(gb, &nb_sh);
444
445 if (nb_sh + nb_sps > MPP_ARRAY_ELEMS(rps->poc))
446 return MPP_ERR_STREAM;
447
448 rps->nb_refs = nb_sh + nb_sps;
449
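/*
 * The first nb_sps entries pick long-term pictures pre-defined in the SPS
 * (lt_ref_pic_poc_lsb_sps / used_by_curr_pic_lt_sps_flag); the remaining
 * nb_sh entries are coded explicitly in the slice header.
 */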
450 for (i = 0; i < rps->nb_refs; i++) {
451 RK_U8 delta_poc_msb_present;
452
453 if ((RK_U32)i < nb_sps) {
454 RK_U8 lt_idx_sps = 0;
455
456 if (sps->num_long_term_ref_pics_sps > 1)
READ_BITS(gb, mpp_ceil_log2(sps->num_long_term_ref_pics_sps), &lt_idx_sps);
458
459 rps->poc[i] = sps->lt_ref_pic_poc_lsb_sps[lt_idx_sps];
460 rps->used[i] = sps->used_by_curr_pic_lt_sps_flag[lt_idx_sps];
461 } else {
462 READ_BITS(gb, sps->log2_max_poc_lsb, &rps->poc[i]);
463 READ_ONEBIT(gb, &rps->used[i]);
464 }
465
466 READ_ONEBIT(gb, &delta_poc_msb_present);
467 if (delta_poc_msb_present) {
468 RK_S32 delta = 0;
469
470 READ_UE(gb, &delta);
471
472 if (i && (RK_U32)i != nb_sps)
473 delta += prev_delta_msb;
474
475 rps->poc[i] += s->poc - delta * max_poc_lsb - s->sh.pic_order_cnt_lsb;
476 prev_delta_msb = delta;
477 }
478 }
479
480 s->rps_bit_offset[s->slice_idx]
481 += (gb->used_bits - bit_begin);
482
483 return 0;
484 __BITREAD_ERR:
485 return MPP_ERR_STREAM;
486 }
487
static RK_S32 set_sps(HEVCContext *s, const HEVCSPS *sps)
489 {
490 RK_U32 num = 0, den = 0;
491 MppFrameFormat fmt = s->h265dctx->cfg->base.out_fmt & (~MPP_FRAME_FMT_MASK);
492
493 s->h265dctx->coded_width = sps->width;
494 s->h265dctx->coded_height = sps->height;
495 s->h265dctx->width = sps->output_width;
496 s->h265dctx->height = sps->output_height;
497 s->h265dctx->pix_fmt = fmt | sps->pix_fmt;
498 s->h265dctx->nBitDepth = sps->bit_depth;
499 s->h265dctx->sample_aspect_ratio = sps->vui.sar;
500
501 if (sps->vui.video_signal_type_present_flag)
502 s->h265dctx->color_range = sps->vui.video_full_range_flag ?
503 MPP_FRAME_RANGE_JPEG : MPP_FRAME_RANGE_MPEG;
504 else
505 s->h265dctx->color_range = MPP_FRAME_RANGE_MPEG;
506
507 if (sps->vui.colour_description_present_flag) {
508 s->h265dctx->colorspace = sps->vui.matrix_coeffs;
509 } else {
510 s->h265dctx->colorspace = MPP_FRAME_SPC_UNSPECIFIED;
511 }
512
513 s->sps = sps;
514 s->vps = (HEVCVPS*) s->vps_list[s->sps->vps_id];
515
516 if (s->vps->vps_timing_info_present_flag) {
517 num = s->vps->vps_num_units_in_tick;
518 den = s->vps->vps_time_scale;
519 } else if (sps->vui.vui_timing_info_present_flag) {
520 num = sps->vui.vui_num_units_in_tick;
521 den = sps->vui.vui_time_scale;
522 }
523
524 if (num != 0 && den != 0) {
525 // s->h265dctx->time_base.num = num;
526 // s->h265dctx->time_base.den = den;
527 // av_reduce(&s->h265dctx->time_base.num, &s->h265dctx->time_base.den,
528 // num, den, 1 << 30);
529 }
530
531 return 0;
532
533 }
static RK_S32 compare_sliceheader(SliceHeader *openhevc_sh, SliceHeader *sh)
535 {
536
537 if (openhevc_sh->pps_id != sh->pps_id) {
538 mpp_log(" pps_id diff \n");
539 return -1;
540 }
541
542 if (openhevc_sh->slice_type != sh->slice_type) {
543 mpp_log(" slice_type diff \n");
544 return -1;
545 }
546
547 if (openhevc_sh->pic_order_cnt_lsb != sh->pic_order_cnt_lsb) {
548 mpp_log(" pic_order_cnt_lsb diff \n");
549 return -1;
550 }
551
552 if (openhevc_sh->first_slice_in_pic_flag != sh->first_slice_in_pic_flag) {
553 mpp_log(" first_slice_in_pic_flag diff \n");
554 return -1;
555 }
556
557 if (openhevc_sh->dependent_slice_segment_flag != sh->dependent_slice_segment_flag) {
558 mpp_log(" dependent_slice_segment_flag diff \n");
559 return -1;
560 }
561
562 if (openhevc_sh->pic_output_flag != sh->pic_output_flag) {
563 mpp_log(" pic_output_flag diff \n");
564 return -1;
565 }
566
567 if (openhevc_sh->colour_plane_id != sh->colour_plane_id) {
568 mpp_log(" colour_plane_id diff \n");
569 return -1;
570 }
571
572 if (openhevc_sh->rpl_modification_flag[0] != sh->rpl_modification_flag[0]) {
573 mpp_log(" rpl_modification_flag[0] diff \n");
574 return -1;
575 }
576
577 if (openhevc_sh->rpl_modification_flag[1] != sh->rpl_modification_flag[1]) {
578 mpp_log(" rpl_modification_flag[1] diff \n");
579 return -1;
580 }
581
582 if (openhevc_sh->no_output_of_prior_pics_flag != sh->no_output_of_prior_pics_flag) {
583 mpp_log(" no_output_of_prior_pics_flag diff \n");
584 return -1;
585 }
586
587 if (openhevc_sh->slice_temporal_mvp_enabled_flag != sh->slice_temporal_mvp_enabled_flag) {
588 mpp_log(" slice_temporal_mvp_enabled_flag diff \n");
589 return -1;
590 }
591
592 if (openhevc_sh->nb_refs[0] != sh->nb_refs[0]) {
593 mpp_log(" nb_refs[0] diff \n");
594 return -1;
595 }
596
597 if (openhevc_sh->nb_refs[1] != sh->nb_refs[1]) {
598 mpp_log(" nb_refs[1] diff \n");
599 return -1;
600 }
601
602 if (openhevc_sh->slice_sample_adaptive_offset_flag[0] !=
603 sh->slice_sample_adaptive_offset_flag[0]) {
604 mpp_log(" slice_sample_adaptive_offset_flag[0] diff \n");
605 return -1;
606 }
607
608 if (openhevc_sh->slice_sample_adaptive_offset_flag[1] !=
609 sh->slice_sample_adaptive_offset_flag[1]) {
610 mpp_log(" slice_sample_adaptive_offset_flag[1] diff \n");
611 return -1;
612 }
613
614 if (openhevc_sh->slice_sample_adaptive_offset_flag[2] !=
615 sh->slice_sample_adaptive_offset_flag[2]) {
616 mpp_log(" slice_sample_adaptive_offset_flag[2] diff \n");
617 return -1;
618 }
619
620 if (openhevc_sh->mvd_l1_zero_flag != sh->mvd_l1_zero_flag) {
621 mpp_log(" mvd_l1_zero_flag diff \n");
622 return -1;
623 }
624 if (openhevc_sh->cabac_init_flag != sh->cabac_init_flag) {
625 mpp_log(" cabac_init_flag diff \n");
626 return -1;
627 }
628
629 if (openhevc_sh->disable_deblocking_filter_flag !=
630 sh->disable_deblocking_filter_flag) {
631 mpp_log(" disable_deblocking_filter_flag diff \n");
632 return -1;
633 }
634
635 if (openhevc_sh->slice_loop_filter_across_slices_enabled_flag !=
636 sh->slice_loop_filter_across_slices_enabled_flag) {
637 mpp_log(" slice_loop_filter_across_slices_enable diff \n");
638 return -1;
639 }
640
641 if (openhevc_sh->collocated_list != sh->collocated_list) {
642 mpp_log(" collocated_list diff \n");
643 return -1;
644 }
645
646 if (openhevc_sh->collocated_ref_idx != sh->collocated_ref_idx) {
647 mpp_log(" collocated_ref_idx diff \n");
648 return -1;
649 }
650
651 if (openhevc_sh->slice_qp_delta != sh->slice_qp_delta) {
652 mpp_log(" slice_qp_delta diff \n");
653 return -1;
654 }
655
656 if (openhevc_sh->slice_cb_qp_offset != sh->slice_cb_qp_offset) {
657 mpp_log(" slice_cb_qp_offset diff \n");
658 return -1;
659 }
660
661 if (openhevc_sh->slice_cr_qp_offset != sh->slice_cr_qp_offset) {
662 mpp_log(" slice_cr_qp_offset diff \n");
663 return -1;
664 }
665
666 if (openhevc_sh->beta_offset != sh->beta_offset) {
667 mpp_log(" beta_offset diff \n");
668 return -1;
669 }
670
671 if (openhevc_sh->tc_offset != sh->tc_offset) {
672 mpp_log(" tc_offset diff \n");
673 return -1;
674 }
675
676 if (openhevc_sh->max_num_merge_cand != sh->max_num_merge_cand) {
677 mpp_log(" max_num_merge_cand diff \n");
678 return -1;
679 }
680
681 if (openhevc_sh->num_entry_point_offsets != sh->num_entry_point_offsets) {
682 mpp_log(" num_entry_point_offsets diff \n");
683 return -1;
684 }
685
686 if (openhevc_sh->slice_qp != sh->slice_qp) {
687 mpp_log(" slice_qp diff \n");
688 return -1;
689 }
690
691 if (openhevc_sh->luma_log2_weight_denom != sh->luma_log2_weight_denom) {
692 mpp_log(" luma_log2_weight_denom diff \n");
693 return -1;
694 }
695
696 if (openhevc_sh->chroma_log2_weight_denom != sh->chroma_log2_weight_denom) {
697 mpp_log(" chroma_log2_weight_denom diff \n");
698 return -1;
699 }
700
701 /* if (openhevc_sh->slice_ctb_addr_rs != sh->slice_ctb_addr_rs) {
702 mpp_log(" slice_ctb_addr_rs diff \n");
703 return -1;
704 }*/
705 return 0;
706 }
707
static RK_S32 hls_slice_header(HEVCContext *s)
709 {
710
711 BitReadCtx_t *gb = &s->HEVClc->gb;
712 SliceHeader *sh = &s->sh;
713 RK_S32 i, ret;
714 RK_S32 value;
715 RK_U32 pps_id;
716 RK_S32 bit_begin;
717
718 #ifdef JCTVC_M0458_INTERLAYER_RPS_SIG
719 int NumILRRefIdx;
720 #endif
721
722 // Coded parameters
723
724 READ_ONEBIT(gb, &sh->first_slice_in_pic_flag);
725 if ((IS_IDR(s) || IS_BLA(s)) && sh->first_slice_in_pic_flag) {
726 s->seq_decode = (s->seq_decode + 1) & 0xff;
727 s->max_ra = INT_MAX;
728 if (IS_IDR(s))
729 mpp_hevc_clear_refs(s);
730 }
731 if (s->nal_unit_type >= 16 && s->nal_unit_type <= 23)
732 READ_ONEBIT(gb, &sh->no_output_of_prior_pics_flag);
733
734 if (IS_IRAP(s) && s->miss_ref_flag && sh->first_slice_in_pic_flag) {
735 // mpp_err("s->nal_unit_type = %d s->poc %d",s->nal_unit_type,s->poc);
736 s->max_ra = INT_MAX;
737 s->miss_ref_flag = 0;
738 }
739 READ_UE(gb, &pps_id);
740
741 if (pps_id >= MAX_PPS_COUNT || !s->pps_list[pps_id]) {
742 mpp_err( "PPS id out of range: %d\n", pps_id);
743 return MPP_ERR_STREAM;
744 } else {
745 sh->pps_id = pps_id;
746 if (pps_id != s->pre_pps_id) {
747 s->ps_need_upate = 1;
748 s->pre_pps_id = pps_id;
749 }
750 }
751
752 if (!sh->first_slice_in_pic_flag &&
753 s->pps != (HEVCPPS*)s->pps_list[sh->pps_id]) {
754 mpp_err( "PPS changed between slices.\n");
755 return MPP_ERR_STREAM;
756 }
757 s->pps = (HEVCPPS*)s->pps_list[sh->pps_id];
758
759 if (s->sps_need_upate || s->sps != (HEVCSPS*)s->sps_list[s->pps->sps_id]) {
760 s->sps = (HEVCSPS*)s->sps_list[s->pps->sps_id];
761 mpp_hevc_clear_refs(s);
762
763 s->ps_need_upate = 1;
764 s->sps_need_upate = 0;
765 ret = set_sps(s, s->sps);
766 if (ret < 0)
767 return ret;
768
769 s->seq_decode = (s->seq_decode + 1) & 0xff;
770 s->max_ra = INT_MAX;
771 }
772
773 // s->h265dctx->profile = s->sps->ptl.general_ptl.profile_idc;
774 // s->h265dctx->level = s->sps->ptl.general_ptl.level_idc;
775
776 sh->dependent_slice_segment_flag = 0;
777 if (!sh->first_slice_in_pic_flag) {
778 RK_S32 slice_address_length;
779
780 if (s->pps->dependent_slice_segments_enabled_flag)
781 READ_ONEBIT(gb, &sh->dependent_slice_segment_flag);
782
783 slice_address_length = mpp_ceil_log2(s->sps->ctb_width *
784 s->sps->ctb_height);
785
786 READ_BITS(gb, slice_address_length, &sh->slice_segment_addr);
787
788 if (sh->slice_segment_addr >= (RK_U32)(s->sps->ctb_width * s->sps->ctb_height)) {
789 mpp_err(
790 "Invalid slice segment address: %u.\n",
791 sh->slice_segment_addr);
792 return MPP_ERR_STREAM;
793 }
794
795 if (!sh->dependent_slice_segment_flag) {
796 sh->slice_addr = sh->slice_segment_addr;
797 s->slice_idx++;
798 }
799 } else {
800 sh->slice_segment_addr = sh->slice_addr = 0;
801 s->slice_idx = 0;
802 s->slice_initialized = 0;
803 }
804
805 if (!sh->dependent_slice_segment_flag) {
806 s->slice_initialized = 0;
807
808 for (i = 0; i < s->pps->num_extra_slice_header_bits; i++)
809 SKIP_BITS(gb, 1); // slice_reserved_undetermined_flag[]
810
811 READ_UE(gb, &sh->slice_type);
812 if (!(sh->slice_type == I_SLICE ||
813 sh->slice_type == P_SLICE ||
814 sh->slice_type == B_SLICE)) {
815 mpp_err( "Unknown slice type: %d.\n",
816 sh->slice_type);
817 return MPP_ERR_STREAM;
818 }
819 if (!s->decoder_id && IS_IRAP(s) && sh->slice_type != I_SLICE) {
820 mpp_err( "Inter slices in an IRAP frame.\n");
821 return MPP_ERR_STREAM;
822 }
823
824 if (s->pps->output_flag_present_flag)
825 READ_ONEBIT(gb, &sh->pic_output_flag);
826
827 if (s->sps->separate_colour_plane_flag)
828 READ_BITS(gb, 2, &sh->colour_plane_id );
829
830 if (!IS_IDR(s)) {
831 int poc;
832
833 READ_BITS(gb, s->sps->log2_max_poc_lsb, &sh->pic_order_cnt_lsb);
834 poc = mpp_hevc_compute_poc(s, sh->pic_order_cnt_lsb);
835 if (!sh->first_slice_in_pic_flag && poc != s->poc) {
836 mpp_log("Ignoring POC change between slices: %d -> %d\n", s->poc, poc);
837 #if 0
838 if (s->h265dctx->err_recognition & AV_EF_EXPLODE)
839 return MPP_ERR_STREAM;
840 #endif
841 poc = s->poc;
842 }
843 s->poc = poc;
844
845 READ_ONEBIT(gb, &sh->short_term_ref_pic_set_sps_flag);
846
847 bit_begin = gb->used_bits;
848
849 if (!sh->short_term_ref_pic_set_sps_flag) {
850
851 ret = mpp_hevc_decode_short_term_rps(s, &sh->slice_rps, s->sps, 1);
852 if (ret < 0)
853 return ret;
854
855 sh->short_term_rps = &sh->slice_rps;
856 } else {
857 RK_S32 numbits, rps_idx;
858
859 if (!s->sps->nb_st_rps) {
860 mpp_err( "No ref lists in the SPS.\n");
861 return MPP_ERR_STREAM;
862 }
863
864 numbits = mpp_ceil_log2(s->sps->nb_st_rps);
865 rps_idx = 0;
866 if (numbits > 0)
867 READ_BITS(gb, numbits, &rps_idx);
868
869 if (sh->short_term_rps != &s->sps->st_rps[rps_idx])
870 s->rps_need_upate = 1;
871 sh->short_term_rps = &s->sps->st_rps[rps_idx];
872 }
873
874 s->rps_bit_offset_st[s->slice_idx] = gb->used_bits - bit_begin;
875
876 sh->short_term_ref_pic_set_size = s->rps_bit_offset_st[s->slice_idx];
877
878 ret = decode_lt_rps(s, &sh->long_term_rps, gb);
879 if (ret < 0) {
880 mpp_log("Invalid long term RPS.\n");
881 // if (s->h265dctx->err_recognition & AV_EF_EXPLODE)
882 // return MPP_ERR_STREAM;
883 }
884
885 if (s->sps->sps_temporal_mvp_enabled_flag)
886 READ_ONEBIT(gb, &sh->slice_temporal_mvp_enabled_flag);
887 else
888 sh->slice_temporal_mvp_enabled_flag = 0;
889 } else {
890 s->sh.short_term_rps = NULL;
891 s->poc = 0;
892 }
893
894 /* 8.3.1 */
895 if (s->temporal_id == 0 &&
896 s->nal_unit_type != NAL_TRAIL_N &&
897 s->nal_unit_type != NAL_TSA_N &&
898 s->nal_unit_type != NAL_STSA_N &&
899 s->nal_unit_type != NAL_RADL_N &&
900 s->nal_unit_type != NAL_RADL_R &&
901 s->nal_unit_type != NAL_RASL_N &&
902 s->nal_unit_type != NAL_RASL_R)
903 s->pocTid0 = s->poc;
904
905 if (s->sps->sao_enabled) {
906 READ_ONEBIT(gb, &sh->slice_sample_adaptive_offset_flag[0]);
907 if (s->sps->chroma_format_idc) {
908 READ_ONEBIT(gb, &sh->slice_sample_adaptive_offset_flag[1]);
909 sh->slice_sample_adaptive_offset_flag[2] =
910 sh->slice_sample_adaptive_offset_flag[1];
911 } else {
912 sh->slice_sample_adaptive_offset_flag[1] = 0;
913 sh->slice_sample_adaptive_offset_flag[2] = 0;
914 }
915 } else {
916 sh->slice_sample_adaptive_offset_flag[0] = 0;
917 sh->slice_sample_adaptive_offset_flag[1] = 0;
918 sh->slice_sample_adaptive_offset_flag[2] = 0;
919 }
920
921 sh->nb_refs[L0] = sh->nb_refs[L1] = 0;
922 if (sh->slice_type == P_SLICE || sh->slice_type == B_SLICE) {
923 int nb_refs;
924
925 sh->nb_refs[L0] = s->pps->num_ref_idx_l0_default_active;
926 if (sh->slice_type == B_SLICE)
927 sh->nb_refs[L1] = s->pps->num_ref_idx_l1_default_active;
928
929 READ_ONEBIT(gb, &value);
930
931 if (value) { // num_ref_idx_active_override_flag
932 READ_UE(gb, &sh->nb_refs[L0]);
933 sh->nb_refs[L0] += 1;
934 if (sh->slice_type == B_SLICE) {
935 READ_UE(gb, &sh->nb_refs[L1]);
936 sh->nb_refs[L1] += 1;
937 }
938 }
939 if (sh->nb_refs[L0] > MAX_REFS || sh->nb_refs[L1] > MAX_REFS) {
940 mpp_err( "Too many refs: %d/%d.\n",
941 sh->nb_refs[L0], sh->nb_refs[L1]);
942 return MPP_ERR_STREAM;
943 }
944
945 sh->rpl_modification_flag[0] = 0;
946 sh->rpl_modification_flag[1] = 0;
947 nb_refs = mpp_hevc_frame_nb_refs(s);
948 if (!nb_refs) {
949 mpp_err( "Zero refs for a frame with P or B slices.\n");
950 return MPP_ERR_STREAM;
951 }
952
953 if (s->pps->lists_modification_present_flag && nb_refs > 1) {
954 READ_ONEBIT(gb, &sh->rpl_modification_flag[0]);
955 if (sh->rpl_modification_flag[0]) {
956 for (i = 0; (RK_U32)i < sh->nb_refs[L0]; i++)
957 READ_BITS(gb, mpp_ceil_log2(nb_refs), &sh->list_entry_lx[0][i]);
958 }
959
960 if (sh->slice_type == B_SLICE) {
961 READ_ONEBIT(gb, &sh->rpl_modification_flag[1]);
962 if (sh->rpl_modification_flag[1] == 1)
963 for (i = 0; (RK_U32)i < sh->nb_refs[L1]; i++)
964 READ_BITS(gb, mpp_ceil_log2(nb_refs), &sh->list_entry_lx[1][i]);
965 }
966 }
967
968 if (sh->slice_type == B_SLICE)
969 READ_ONEBIT(gb, &sh->mvd_l1_zero_flag);
970
971 if (s->pps->cabac_init_present_flag)
972 READ_ONEBIT(gb, &sh->cabac_init_flag);
973 else
974 sh->cabac_init_flag = 0;
975
976 sh->collocated_ref_idx = 0;
977 if (sh->slice_temporal_mvp_enabled_flag) {
978 sh->collocated_list = L0;
979 if (sh->slice_type == B_SLICE) {
980 READ_ONEBIT(gb, &value);
981 sh->collocated_list = !value;
982 }
983
984 if (sh->nb_refs[sh->collocated_list] > 1) {
985 READ_UE(gb, &sh->collocated_ref_idx);
986 if (sh->collocated_ref_idx >= sh->nb_refs[sh->collocated_list]) {
987 mpp_err(
988 "Invalid collocated_ref_idx: %d.\n",
989 sh->collocated_ref_idx);
990 return MPP_ERR_STREAM;
991 }
992 }
993 }
994
995 if ((s->pps->weighted_pred_flag && sh->slice_type == P_SLICE) ||
996 (s->pps->weighted_bipred_flag && sh->slice_type == B_SLICE)) {
997 pred_weight_table(s, gb);
998 }
999
1000 READ_UE(gb, &value);
1001 sh->max_num_merge_cand = 5 - value;
1002 if (sh->max_num_merge_cand < 1 || sh->max_num_merge_cand > 5) {
1003 mpp_err(
1004 "Invalid number of merging MVP candidates: %d.\n",
1005 sh->max_num_merge_cand);
1006 return MPP_ERR_STREAM;
1007 }
1008 }
1009 READ_SE(gb, &sh->slice_qp_delta );
1010 if (s->pps->pic_slice_level_chroma_qp_offsets_present_flag) {
1011 READ_SE(gb, &sh->slice_cb_qp_offset);
1012 READ_SE(gb, &sh->slice_cr_qp_offset);
1013 } else {
1014 sh->slice_cb_qp_offset = 0;
1015 sh->slice_cr_qp_offset = 0;
1016 }
1017
1018 if (s->pps->deblocking_filter_control_present_flag) {
1019 int deblocking_filter_override_flag = 0;
1020
1021 if (s->pps->deblocking_filter_override_enabled_flag)
1022 READ_ONEBIT(gb, & deblocking_filter_override_flag);
1023
1024 if (deblocking_filter_override_flag) {
1025 READ_ONEBIT(gb, &sh->disable_deblocking_filter_flag);
1026 if (!sh->disable_deblocking_filter_flag) {
1027 READ_SE(gb, &sh->beta_offset);
1028 sh->beta_offset = sh->beta_offset * 2;
1029 READ_SE(gb, &sh->tc_offset);
1030 sh->tc_offset = sh->tc_offset * 2;
1031 }
1032 } else {
1033 sh->disable_deblocking_filter_flag = s->pps->disable_dbf;
1034 sh->beta_offset = s->pps->beta_offset;
1035 sh->tc_offset = s->pps->tc_offset;
1036 }
1037 } else {
1038 sh->disable_deblocking_filter_flag = 0;
1039 sh->beta_offset = 0;
1040 sh->tc_offset = 0;
1041 }
1042
1043 if (s->pps->seq_loop_filter_across_slices_enabled_flag &&
1044 (sh->slice_sample_adaptive_offset_flag[0] ||
1045 sh->slice_sample_adaptive_offset_flag[1] ||
1046 !sh->disable_deblocking_filter_flag)) {
1047 READ_ONEBIT(gb, &sh->slice_loop_filter_across_slices_enabled_flag);
1048 } else {
1049 sh->slice_loop_filter_across_slices_enabled_flag = s->pps->seq_loop_filter_across_slices_enabled_flag;
1050 }
1051 } else if (!s->slice_initialized) {
1052 mpp_err( "Independent slice segment missing.\n");
1053 return MPP_ERR_STREAM;
1054 }
1055
1056 sh->num_entry_point_offsets = 0;
1057 if (s->pps->tiles_enabled_flag || s->pps->entropy_coding_sync_enabled_flag) {
1058 READ_UE(gb, &sh->num_entry_point_offsets);
1059 if (s->pps->entropy_coding_sync_enabled_flag) {
1060 if (sh->num_entry_point_offsets > s->sps->ctb_height || sh->num_entry_point_offsets < 0) {
1061 mpp_err("The number of entries %d is higher than the number of CTB rows %d \n",
1062 sh->num_entry_point_offsets,
1063 s->sps->ctb_height);
1064 return MPP_ERR_STREAM;
1065 }
1066 } else {
1067 if (sh->num_entry_point_offsets > s->sps->ctb_height * s->sps->ctb_width || sh->num_entry_point_offsets < 0) {
1068 mpp_err("The number of entries %d is higher than the number of CTBs %d \n",
1069 sh->num_entry_point_offsets,
1070 s->sps->ctb_height * s->sps->ctb_width);
1071 return MPP_ERR_STREAM;
1072 }
1073 }
1074 if (sh->num_entry_point_offsets) {
1075 RK_U32 offset_len_minus1 = 0;
1076
1077 READ_UE(gb, &offset_len_minus1);
1078 for (i = 0; i < sh->num_entry_point_offsets; i++)
1079 SKIP_BITS(gb, offset_len_minus1 + 1);
1080 }
1081 }
1082 if (s->pps->slice_header_extension_present_flag) {
1083 //if slice_header_extension_present_flag is 1, we should cut the extension data.
1084 RK_U32 length = 0;
1085
1086 s->start_bit = gb->used_bits;
1087 READ_UE(gb, &length);
1088 for (i = 0; (RK_U32)i < length; i++) {
1089 SKIP_BITS(gb, 8); // slice_header_extension_data_byte
1090 }
1091 s->end_bit = gb->used_bits;
1092 }
1093
1094 // Inferred parameters
1095 sh->slice_qp = 26U + s->pps->pic_init_qp_minus26 + sh->slice_qp_delta;
1096 if (sh->slice_qp > 51 ||
1097 sh->slice_qp < -s->sps->qp_bd_offset) {
1098 mpp_err("The slice_qp %d is outside the valid range "
1099 "[%d, 51].\n",
1100 sh->slice_qp,
1101 -s->sps->qp_bd_offset);
1102 return MPP_ERR_STREAM;
1103 }
1104 if (s->h265dctx->compare_info != NULL && sh->first_slice_in_pic_flag) {
1105 CurrentFameInf_t *info = (CurrentFameInf_t *)s->h265dctx->compare_info;
1106 SliceHeader *openhevc_sh = (SliceHeader *)&info->sh;
1107 h265d_dbg(H265D_DBG_FUNCTION, "compare_sliceheader in");
1108 if (compare_sliceheader(openhevc_sh, &s->sh) < 0) {
1109 mpp_log("compare sliceHeader with openhevc diff\n");
1110 mpp_assert(0);
1111 }
1112 h265d_dbg(H265D_DBG_FUNCTION, "compare_sliceheader ok");
1113 }
1114
1115 sh->slice_ctb_addr_rs = sh->slice_segment_addr;
1116
1117 if (!s->sh.slice_ctb_addr_rs && s->sh.dependent_slice_segment_flag) {
1118 mpp_err("Impossible slice segment.\n");
1119 return MPP_ERR_STREAM;
1120 }
1121
1122 s->slice_initialized = 1;
1123
1124 return 0;
1125 __BITREAD_ERR:
1126 return MPP_ERR_STREAM;
1127 }
1128
1129 /**
* @return MPP_ERR_STREAM if the NAL unit header is invalid,
* otherwise the nuh_layer_id of the parsed unit
1132 */
static RK_S32 hls_nal_unit(HEVCContext *s)
1134 {
1135 BitReadCtx_t*gb = &s->HEVClc->gb;
1136 RK_S32 value = 0;
1137
1138 READ_ONEBIT(gb, &value); /*this bit should be zero*/
1139
1140 READ_BITS(gb, 6, &s->nal_unit_type);
1141
1142 READ_BITS(gb, 6, &s->nuh_layer_id);
1143
1144 READ_BITS(gb, 3, &s->temporal_id);
1145
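/* nuh_temporal_id_plus1 is coded as TemporalId + 1, so after the decrement
 * a negative value means the reserved value 0 was signalled and the header
 * is invalid (rejected below). */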
1146 s->temporal_id = s->temporal_id - 1;
1147
1148 h265d_dbg(H265D_DBG_GLOBAL,
1149 "nal_unit_type: %d, nuh_layer_id: %d temporal_id: %d\n",
1150 s->nal_unit_type, s->nuh_layer_id, s->temporal_id);
1151
1152 if (s->temporal_id < 0)
1153 return MPP_ERR_STREAM;
1154
1155 return (s->nuh_layer_id);
1156 __BITREAD_ERR:
1157 return MPP_ERR_STREAM;
1158 }
1159
static RK_S32 mpp_hevc_out_dec_order(void *ctx)
1161 {
1162 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1163 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
1164
1165 if (s->ref && (s->ref->flags & HEVC_FRAME_FLAG_OUTPUT)) {
1166 s->ref->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1167 mpp_buf_slot_set_flag(s->slots, s->ref->slot_index, SLOT_QUEUE_USE);
1168 mpp_buf_slot_enqueue(s->slots, s->ref->slot_index, QUEUE_DISPLAY);
1169 }
1170
1171 return 0;
1172 }
1173
static RK_S32 mpp_hevc_output_frame(void *ctx, int flush)
1175 {
1176
1177 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1178 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
1179 MppDecCfgSet *cfg = h265dctx->cfg;
1180 RK_U32 find_next_ready = 0;
1181
1182 if (cfg->base.fast_out)
1183 return mpp_hevc_out_dec_order(ctx);
1184
1185 do {
1186 RK_S32 nb_output = 0;
1187 RK_S32 min_poc = INT_MAX;
1188 RK_S32 min_idx = 0;
1189 RK_U32 i;
1190
1191 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
1192 HEVCFrame *frame = &s->DPB[i];
1193 if ((frame->flags & HEVC_FRAME_FLAG_OUTPUT) &&
1194 frame->sequence == s->seq_output) {
1195 nb_output++;
1196 if (frame->poc < min_poc) {
1197 min_poc = frame->poc;
1198 min_idx = i;
1199 }
1200 }
1201 }
1202
1203 /* wait for more frames before output */
1204 if (!flush && s->seq_output == s->seq_decode && s->sps &&
1205 nb_output <= s->sps->temporal_layer[s->sps->max_sub_layers - 1].num_reorder_pics) {
1206 if (cfg->base.enable_fast_play && (IS_IDR(s) ||
1207 (IS_BLA(s) && !s->first_i_fast_play))) {
1208 s->first_i_fast_play = 1;
1209 } else {
1210 return 0;
1211 }
1212 }
1213
1214 if (nb_output) {
1215 HEVCFrame *frame = &s->DPB[min_idx];
1216
1217 frame->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1218 s->output_frame_idx = min_idx;
1219
1220 mpp_buf_slot_set_flag(s->slots, frame->slot_index, SLOT_QUEUE_USE);
1221 mpp_buf_slot_enqueue(s->slots, frame->slot_index, QUEUE_DISPLAY);
1222
1223 h265d_dbg(H265D_DBG_REF,
1224 "Output frame with POC %d frame->slot_index = %d\n", frame->poc, frame->slot_index);
1225
1226 do {
1227 find_next_ready = 0;
1228 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
1229 HEVCFrame *frame_next_ready = &s->DPB[i];
1230 if ((frame_next_ready->flags & HEVC_FRAME_FLAG_OUTPUT) &&
1231 frame_next_ready->sequence == s->seq_output) {
1232 if (frame_next_ready->poc == frame->poc + 1) {
1233 find_next_ready = 1;
1234 s->output_frame_idx = i;
1235 frame_next_ready->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1236 frame = frame_next_ready;
1237 mpp_buf_slot_set_flag(s->slots, frame->slot_index, SLOT_QUEUE_USE);
1238 mpp_buf_slot_enqueue(s->slots, frame->slot_index, QUEUE_DISPLAY);
1239 h265d_dbg(H265D_DBG_REF,
1240 "Output frame with POC %d frm_next_ready->slot_index = %d\n",
1241 frame_next_ready->poc, frame_next_ready->slot_index);
1242 /* release any frames that are now unused */
1243 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
1244 mpp_hevc_unref_frame(s, &s->DPB[i], 0);
1245 }
1246 }
1247 }
1248 }
1249 } while (find_next_ready);
1250
1251 return 1;
1252 }
1253
1254 if (s->seq_output != s->seq_decode)
1255 s->seq_output = (s->seq_output + 1) & 0xff;
1256 else
1257 break;
1258 } while (1);
1259
1260 return 0;
1261 }
1262
static RK_S32 hevc_frame_start(HEVCContext *s)
1264 {
1265 int ret;
1266
1267 if (s->ref) {
1268 mpp_log_f("found two frame in one packet do nothing!\n");
1269 return 0;
1270 }
1271
1272 s->is_decoded = 0;
1273 s->first_nal_type = s->nal_unit_type;
1274 s->miss_ref_flag = 0;
1275
1276 ret = mpp_hevc_frame_rps(s);
1277 if (ret < 0) {
1278 mpp_err("Error constructing the frame RPS.\n");
1279 goto fail;
1280 }
1281
1282 ret = mpp_hevc_set_new_ref(s, &s->frame, s->poc);
1283 if (ret < 0)
1284 goto fail;
1285
1286 if (!s->h265dctx->cfg->base.disable_error && s->recovery.valid_flag &&
1287 s->recovery.first_frm_valid && s->recovery.first_frm_ref_missing &&
1288 s->poc < s->recovery.recovery_pic_id && s->poc >= s->recovery.first_frm_id) {
1289 mpp_frame_set_discard(s->frame, 1);
1290 h265d_dbg(H265D_DBG_REF, "mark recovery frame discard, poc %d\n", mpp_frame_get_poc(s->frame));
1291 }
1292
1293 if (!s->h265dctx->cfg->base.disable_error && s->miss_ref_flag) {
1294 if (!IS_IRAP(s)) {
1295 if (s->recovery.valid_flag && s->recovery.first_frm_valid && s->recovery.first_frm_id == s->poc) {
1296 s->recovery.first_frm_ref_missing = 1;
1297 mpp_frame_set_discard(s->frame, 1);
1298 h265d_dbg(H265D_DBG_REF, "recovery frame missing ref mark discard, poc %d\n",
1299 mpp_frame_get_poc(s->frame));
1300 } else {
1301 mpp_frame_set_errinfo(s->frame, MPP_FRAME_ERR_UNKNOW);
1302 s->ref->error_flag = 1;
1303 h265d_dbg(H265D_DBG_REF, "missing ref mark error, poc %d\n", mpp_frame_get_poc(s->frame));
1304 }
1305 } else {
/* The current I frame itself has missing references, so the stream is
probably corrupted. First mark the current frame as not-for-output and
flush the remaining frames out of the DPB, then restore the current
frame as an output candidate.
*/
1311 HEVCFrame *frame = NULL;
1312 RK_U32 i = 0;
1313 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
1314 frame = &s->DPB[i];
1315 if (frame->poc == s->poc ) {
1316 frame->flags &= ~(HEVC_FRAME_FLAG_OUTPUT);
1317 break;
1318 } else {
1319 frame = NULL;
1320 }
1321 }
1322 do {
1323 ret = mpp_hevc_output_frame(s->h265dctx, 1);
1324 } while (ret);
1325 if (frame) {
1326 frame->flags |= HEVC_FRAME_FLAG_OUTPUT;
1327 }
1328 }
1329 }
1330
1331 mpp_buf_slot_set_prop(s->slots, s->ref->slot_index, SLOT_FRAME, s->ref->frame);
1332
1333 return 0;
1334
1335 fail:
1336 s->ref = NULL;
1337 return ret;
1338 }
1339
static RK_S32 parser_nal_unit(HEVCContext *s, const RK_U8 *nal, int length)
1341 {
1342
1343 HEVCLocalContext *lc = s->HEVClc;
1344 BitReadCtx_t *gb = &lc->gb;
1345 RK_S32 ret;
1346 mpp_set_bitread_ctx(gb, (RK_U8*)nal, length);
1347 mpp_set_bitread_pseudo_code_type(gb, PSEUDO_CODE_H264_H265);
1348 ret = hls_nal_unit(s);
1349 if (ret < 0) {
1350 mpp_err("Invalid NAL unit %d, skipping.\n",
1351 s->nal_unit_type);
1352 goto fail;
1353 } else if (ret != (s->decoder_id) && s->nal_unit_type != NAL_VPS)
1354 return 0;
1355
1356 if (s->temporal_id > s->temporal_layer_id)
1357 return 0;
1358
1359 s->nuh_layer_id = ret;
1360 h265d_dbg(H265D_DBG_GLOBAL, "s->nal_unit_type = %d,len = %d \n", s->nal_unit_type, length);
1361
1362 if (s->deny_flag && (s->nal_unit_type != NAL_VPS && s->nal_unit_type != NAL_SPS)) {
1363 ret = MPP_ERR_STREAM;
1364 goto fail;
1365 }
1366
1367 switch (s->nal_unit_type) {
1368 case NAL_VPS:
1369 ret = mpp_hevc_decode_nal_vps(s);
1370 if (ret < 0 && !s->is_decoded) {
1371 mpp_err("mpp_hevc_decode_nal_vps error ret = %d", ret);
1372 goto fail;
1373 }
1374 break;
1375 case NAL_SPS:
1376 ret = mpp_hevc_decode_nal_sps(s);
1377 if (ret < 0 && !s->is_decoded) {
1378 mpp_err("mpp_hevc_decode_nal_sps error ret = %d", ret);
1379 goto fail;
1380 }
1381
1382 s->deny_flag = 0;
1383 break;
1384 case NAL_PPS:
1385 if (s->pre_pps_data == NULL) {
1386 s->pre_pps_data = mpp_calloc(RK_U8, length + 128);
1387 memcpy(s->pre_pps_data, nal, length);
1388 s->pps_len = length;
1389 s->pps_buf_size = length + 128;
1390 s->ps_need_upate = 1;
1391 } else if (s->pps_len == length) {
1392 if (memcmp(s->pre_pps_data, nal, length)) {
1393 s->ps_need_upate = 1;
1394 memcpy(s->pre_pps_data, nal, length);
1395 }
1396 } else {
1397 if (s->pps_buf_size < length) {
1398 MPP_FREE(s->pre_pps_data);
1399 s->pre_pps_data = mpp_calloc(RK_U8, length + 128);
1400 memcpy(s->pre_pps_data, nal, length);
1401 s->pps_buf_size = length + 128;
1402 s->pps_len = length;
1403 }
1404 s->ps_need_upate = 1;
1405 }
1406 ret = mpp_hevc_decode_nal_pps(s);
1407 if (ret < 0 && !s->is_decoded) {
1408 mpp_err("mpp_hevc_decode_nal_pps error ret = %d", ret);
1409 goto fail;
1410 }
1411 break;
1412 case NAL_SEI_PREFIX:
1413 case NAL_SEI_SUFFIX:
1414 ret = mpp_hevc_decode_nal_sei(s);
1415 if (ret < 0) {
1416 mpp_err("mpp_hevc_decode_nal_sei error ret = %d", ret);
1417 //goto fail;
1418 }
1419 break;
1420 case NAL_TRAIL_R:
1421 case NAL_TRAIL_N:
1422 case NAL_TSA_N:
1423 case NAL_TSA_R:
1424 case NAL_STSA_N:
1425 case NAL_STSA_R:
1426 case NAL_BLA_W_LP:
1427 case NAL_BLA_W_RADL:
1428 case NAL_BLA_N_LP:
1429 case NAL_IDR_W_RADL:
1430 case NAL_IDR_N_LP:
1431 case NAL_CRA_NUT:
1432 case NAL_RADL_N:
1433 case NAL_RADL_R:
1434 case NAL_RASL_N:
1435 case NAL_RASL_R:
1436 if (s->task == NULL) {
1437 s->extra_has_frame = 1;
1438 break;
1439 }
1440 h265d_dbg(H265D_DBG_FUNCTION, "hls_slice_header in");
1441 ret = hls_slice_header(s);
1442 h265d_dbg(H265D_DBG_FUNCTION, "hls_slice_header out");
1443
1444 if (ret < 0) {
1445 mpp_err("hls_slice_header error ret = %d", ret);
/* s->first_nal_type == NAL_INIT_VALUE means the first NAL has not been parsed yet. */
1447 if ((s->first_nal_type != s->nal_unit_type) && (s->first_nal_type != NAL_INIT_VALUE))
1448 return 0;
1449
1450 return ret;
1451 }
1452
1453 if (s->recovery.valid_flag) {
1454 if (!s->recovery.first_frm_valid) {
1455 s->recovery.first_frm_id = s->poc;
1456 s->recovery.first_frm_valid = 1;
1457 s->recovery.recovery_pic_id = s->recovery.first_frm_id + s->recovery.recovery_frame_cnt;
1458 h265d_dbg(H265D_DBG_SEI, "First recovery frame found, poc %d", s->recovery.first_frm_id);
1459 } else {
1460 if (s->recovery.recovery_pic_id < s->poc)
1461 memset(&s->recovery, 0, sizeof(RecoveryPoint));
1462 }
1463 }
1464
1465 if (s->max_ra == INT_MAX) {
1466 if (s->nal_unit_type == NAL_CRA_NUT || IS_BLA(s) ||
1467 (s->recovery.valid_flag && s->recovery.first_frm_valid &&
1468 s->recovery.first_frm_id == s->poc)) {
1469 s->max_ra = s->poc;
1470 } else {
1471 if (IS_IDR(s))
1472 s->max_ra = INT_MIN;
1473 }
1474 }
1475
1476 if ((s->nal_unit_type == NAL_RASL_R || s->nal_unit_type == NAL_RASL_N) &&
1477 s->poc <= s->max_ra) {
1478 s->is_decoded = 0;
1479 break;
1480 } else if (!s->h265dctx->cfg->base.disable_error &&
(s->poc < s->max_ra) && !IS_IRAP(s)) { // when seeking to an I slice, drop frames whose POC is smaller than the I slice POC
1482 s->is_decoded = 0;
1483 break;
1484 } else {
1485 if (s->nal_unit_type == NAL_RASL_R && s->poc > s->max_ra)
1486 s->max_ra = INT_MIN;
1487 }
1488
1489 if (s->sh.first_slice_in_pic_flag) {
1490 ret = hevc_frame_start(s);
1491 if (ret < 0) {
1492 mpp_err("hevc_frame_start = %d", ret);
1493 return ret;
1494 }
1495 } else if (!s->ref) {
1496 mpp_err("First slice in a frame missing.\n");
1497 goto fail;
1498 }
1499
1500 if (s->nal_unit_type != s->first_nal_type) {
1501 mpp_err("Non-matching NAL types of the VCL NALUs: %d %d\n",
1502 s->first_nal_type, s->nal_unit_type);
1503 goto fail;
1504 }
1505
1506 if (!s->sh.dependent_slice_segment_flag &&
1507 s->sh.slice_type != I_SLICE) {
1508 // ret = mpp_hevc_slice_rpl(s);
1509 if (ret < 0) {
1510 mpp_err("Error constructing the reference lists for the current slice.\n");
1511 goto fail;
1512 }
1513 // rk_get_ref_info(s);
1514 }
1515
1516
1517 s->is_decoded = 1;
1518
1519 break;
1520 case NAL_EOS_NUT:
1521 case NAL_EOB_NUT:
1522 s->seq_decode = (s->seq_decode + 1) & 0xff;
1523 s->max_ra = INT_MAX;
1524 break;
1525 case NAL_AUD:
1526 case NAL_FD_NUT:
1527 case NAL_UNSPEC62:
1528 break;
1529 default:
1530 mpp_log("Skipping NAL unit %d\n", s->nal_unit_type);
1531 }
1532
1533 return 0;
1534 fail:
1535
1536 return ret;
1537 }
1538
1539
1540 typedef union {
1541 RK_U32 u32;
1542 RK_U16 u16[2];
1543 RK_U8 u8 [4];
1544 float f32;
1545 } mpp_alias32;
1546
1547 #define MPP_FAST_UNALIGNED 1
1548
1549
1550 #ifndef MPP_RN32A
1551 #define MPP_RN32A(p) (((const mpp_alias32*)(p))->u32)
1552 #endif
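/* MPP_RN32A reads 32 bits through the union above (type punning); it feeds
 * the word-at-a-time start-code scan in mpp_hevc_extract_rbsp() below. */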
RK_S32 mpp_hevc_extract_rbsp(HEVCContext *s, const RK_U8 *src, int length,
1554 HEVCNAL *nal)
1555 {
1556 RK_S32 i;
1557
1558 s->skipped_bytes = 0;
1559
1560 #define STARTCODE_TEST \
1561 if (i + 2 < length && src[i + 1] == 0 && src[i + 2] == 1) { \
1562 /* startcode, so we must be past the end */ \
1563 length = i; \
1564 break; \
1565 }
1566
1567 #if MPP_FAST_UNALIGNED
1568 #define FIND_FIRST_ZERO \
1569 if (i > 0 && !src[i]) \
1570 i--; \
1571 while (src[i]) \
1572 i++
1573
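/*
 * Word-at-a-time scan (the same trick used by FFmpeg's start-code search):
 * the expression below is non-zero only when one of the probed bytes of the
 * 32-bit word is 0x00, i.e. only then might a 00 00 01 start code lie
 * nearby, and only then is the slower byte-wise STARTCODE_TEST run.
 */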
1574 for (i = 0; i + 1 < length; i += 5) {
1575 if (!((~MPP_RN32A(src + i) &
1576 (MPP_RN32A(src + i) - 0x01000101U)) &
1577 0x80008080U))
1578 continue;
1579
1580 FIND_FIRST_ZERO;
1581
1582 STARTCODE_TEST;
1583 i -= 3;
1584 }
1585 #else
1586 for (i = 0; i + 1 < length; i += 2) {
1587 if (src[i])
1588 continue;
1589 if (i > 0 && src[i - 1] == 0)
1590 i--;
1591 STARTCODE_TEST;
1592 }
1593 #endif
1594
1595 if (length + MPP_INPUT_BUFFER_PADDING_SIZE > nal->rbsp_buffer_size) {
1596 RK_S32 min_size = length + MPP_INPUT_BUFFER_PADDING_SIZE;
1597 mpp_free(nal->rbsp_buffer);
1598 nal->rbsp_buffer = NULL;
1599 min_size = MPP_MAX(17 * min_size / 16 + 32, min_size);
1600 nal->rbsp_buffer = mpp_malloc(RK_U8, min_size);
if (nal->rbsp_buffer == NULL) {
nal->rbsp_buffer_size = 0;
return MPP_ERR_NOMEM;
}
1604 nal->rbsp_buffer_size = min_size;
1605 }
1606
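/* Note: the payload is copied verbatim and emulation-prevention bytes (0x03)
 * are not removed here; the bit reader strips them on the fly via
 * PSEUDO_CODE_H264_H265 (see parser_nal_unit()). */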
1607 memcpy(nal->rbsp_buffer, src, length);
1608 nal->data = nal->rbsp_buffer;
1609 nal->size = length;
1610
1611 memset(nal->rbsp_buffer + length, 0, MPP_INPUT_BUFFER_PADDING_SIZE);
1612 return length;
1613 }
1614
static RK_S32 split_nal_units(HEVCContext *s, RK_U8 *buf, RK_U32 length)
1616 {
1617 RK_S32 i, consumed;
1618 MPP_RET ret = MPP_OK;
1619 s->nb_nals = 0;
1620 while (length >= 4) {
1621 HEVCNAL *nal;
1622 RK_S32 extract_length = 0;
1623
1624 if (s->is_nalff) {
1625 for (i = 0; i < s->nal_length_size; i++)
1626 extract_length = (extract_length << 8) | buf[i];
1627 buf += s->nal_length_size;
1628 length -= s->nal_length_size;
1629
1630 if ((RK_U32)extract_length > length) {
1631 mpp_err( "Invalid NAL unit size.\n");
1632 ret = MPP_ERR_STREAM;
1633 goto fail;
1634 }
1635 } else {
1636 /* search start code */
1637 if (buf[2] == 0) {
1638 length--;
1639 buf++;
1640 continue;
1641 }
1642 if (buf[0] != 0 || buf[1] != 0 || buf[2] != 1) {
1643 RK_U32 state = (RK_U32) - 1;
1644 int has_nal = 0;
1645 for (i = 0; i < (RK_S32)length; i++) {
1646 state = (state << 8) | buf[i];
1647 if (((state >> 8) & 0xFFFFFF) == START_CODE) {
1648 has_nal = 1;
1649 i = i - 3;
1650 break;
1651 }
1652 }
1653
1654 if (has_nal) {
1655 length -= i;
1656 buf += i;
1657 continue;
1658 }
1659
1660 if (s->nb_nals) {
1661 return MPP_OK;
1662 } else {
1663 mpp_err( "No start code is found.\n");
1664 ret = MPP_ERR_STREAM;
1665 goto fail;
1666 }
1667 }
1668
1669 buf += 3;
1670 length -= 3;
1671 }
1672
1673 if (!s->is_nalff)
1674 extract_length = length;
1675
1676 if (!extract_length) {
1677 return MPP_OK;
1678 }
if (s->nals_allocated < 1) {
RK_S32 new_size = s->nals_allocated + 10;
HEVCNAL *tmp = mpp_calloc(HEVCNAL, new_size);
if (!tmp) {
ret = MPP_ERR_NOMEM;
goto fail;
}
s->nals_allocated = new_size;
s->nals = tmp;
}
1686 if (s->nals_allocated < s->nb_nals + 1) {
1687 int new_size = s->nals_allocated + 10;
HEVCNAL *tmp = mpp_malloc(HEVCNAL, new_size);
if (!tmp) {
mpp_err("return enomem new_size %d", new_size);
ret = MPP_ERR_NOMEM;
goto fail;
}
memset((void*)tmp, 0, new_size * sizeof(HEVCNAL));
memcpy((void*)tmp, (void*)s->nals, (new_size - 10) * sizeof(HEVCNAL));
mpp_free(s->nals);
1698 s->nals = tmp;
1699 memset(s->nals + s->nals_allocated, 0,
1700 (new_size - s->nals_allocated) * sizeof(*tmp));
1701 s->nals_allocated = new_size;
1702 }
1703 nal = &s->nals[s->nb_nals];
1704
1705 consumed = mpp_hevc_extract_rbsp(s, buf, extract_length, nal);
1706
1707 if (consumed < 0) {
1708 ret = MPP_ERR_STREAM;
1709 goto fail;
1710 }
1711
1712 s->nb_nals++;
1713
1714 mpp_set_bitread_ctx(&s->HEVClc->gb, (RK_U8 *)nal->data, nal->size);
1715 mpp_set_bitread_pseudo_code_type(&s->HEVClc->gb, PSEUDO_CODE_H264_H265);
1716 if (hls_nal_unit(s) < 0)
1717 s->nb_nals--;
1718
1719 if (s->nal_unit_type < NAL_VPS) {
1720
1721 if (nal->size != consumed)
1722 h265d_dbg(H265D_DBG_GLOBAL, "tag_stream: nal.size=%d, consumed=%d\n", nal->size, consumed);
1723
1724 }
1725
1726 /* if (s->nal_unit_type == NAL_EOB_NUT ||
1727 s->nal_unit_type == NAL_EOS_NUT)
1728 s->eos = 1;*/
1729
1730 buf += consumed;
1731 length -= consumed;
1732 }
1733 fail:
1734
1735 return (s->nb_nals) ? MPP_OK : ret;
1736 }
1737
void mpp_hevc_fill_dynamic_meta(HEVCContext *s, const RK_U8 *data, RK_U32 size, RK_U32 hdr_fmt)
1739 {
1740 MppFrameHdrDynamicMeta *hdr_dynamic_meta = s->hdr_dynamic_meta;
1741
1742 if (hdr_dynamic_meta && (hdr_dynamic_meta->size < size)) {
1743 mpp_free(hdr_dynamic_meta);
1744 hdr_dynamic_meta = NULL;
1745 }
1746
1747 if (!hdr_dynamic_meta) {
1748 hdr_dynamic_meta = mpp_calloc_size(MppFrameHdrDynamicMeta,
1749 sizeof(MppFrameHdrDynamicMeta) + size);
1750 if (!hdr_dynamic_meta) {
1751 mpp_err_f("malloc hdr dynamic data failed!\n");
1752 return;
1753 }
1754 }
1755 if (size && data) {
1756 switch (hdr_fmt) {
1757 case DLBY: {
1758 RK_U8 start_code[4] = {0, 0, 0, 1};
1759
1760 memcpy((RK_U8*)hdr_dynamic_meta->data, start_code, 4);
1761 memcpy((RK_U8*)hdr_dynamic_meta->data + 4, (RK_U8*)data, size - 4);
1762 } break;
1763 case HDRVIVID:
1764 case HDR10PLUS: {
1765 memcpy((RK_U8*)hdr_dynamic_meta->data, (RK_U8*)data, size);
1766 } break;
1767 default:
1768 break;
1769 }
1770 hdr_dynamic_meta->size = size;
1771 hdr_dynamic_meta->hdr_fmt = hdr_fmt;
1772 }
1773 s->hdr_dynamic_meta = hdr_dynamic_meta;
1774 s->hdr_dynamic = 1;
1775 s->is_hdr = 1;
1776 }
1777
static RK_S32 check_rpus(HEVCContext *s)
1779 {
1780 HEVCNAL *nal;
1781
1782 if (s->nb_nals <= 1)
1783 return 0;
1784
1785 nal = &s->nals[s->nb_nals - 1];
1786
1787 if (nal->size > 2) {
1788 BitReadCtx_t gb;
1789 RK_S32 value, nal_unit_type, nuh_layer_id, temporal_id;
1790
1791 mpp_set_bitread_ctx((&gb), (RK_U8*)nal->data, nal->size);
1792 mpp_set_bitread_pseudo_code_type((&gb), PSEUDO_CODE_H264_H265);
1793
1794 READ_ONEBIT((&gb), &value); /*this bit should be zero*/
1795 READ_BITS((&gb), 6, &nal_unit_type);
1796 READ_BITS((&gb), 6, &nuh_layer_id);
1797 READ_BITS((&gb), 3, &temporal_id);
1798
/*
* Check for RPU delimiter.
*
* Dolby Vision RPUs masquerade as unregistered NALs of type 62.
*
* We have to do this check here and create the rpu buffer, since RPUs are
* appended to the end of an AU; they are the last non-EOB/EOS NAL in the AU.
*/
1807 if (nal_unit_type == NAL_UNSPEC62)
1808 mpp_hevc_fill_dynamic_meta(s, nal->data + 2, gb.bytes_left_ + 4, DLBY);
1809 }
1810 return 0;
1811 __BITREAD_ERR:
1812 return MPP_ERR_STREAM;
1813 }
1814
static RK_S32 parser_nal_units(HEVCContext *s)
1816 {
1817 /* parse the NAL units */
1818 RK_S32 i, ret = 0, slice_cnt = 0;
1819
1820 check_rpus(s);
1821
1822 for (i = 0; i < s->nb_nals; i++) {
1823 ret = parser_nal_unit(s, s->nals[i].data, s->nals[i].size);
1824 if (ret < 0) {
1825 mpp_err("Error parsing NAL unit #%d,error ret = %d.\n", i, ret);
1826 goto fail;
1827 }
1828 /* update slice data if slice_header_extension_present_flag is 1*/
1829 if (s->nal_unit_type < 32) {
1830 switch (s->nal_unit_type) {
1831 case NAL_TRAIL_R:
1832 case NAL_TRAIL_N:
1833 case NAL_TSA_N:
1834 case NAL_TSA_R:
1835 case NAL_STSA_N:
1836 case NAL_STSA_R:
1837 case NAL_BLA_W_LP:
1838 case NAL_BLA_W_RADL:
1839 case NAL_BLA_N_LP:
1840 case NAL_IDR_W_RADL:
1841 case NAL_IDR_N_LP:
1842 case NAL_CRA_NUT:
1843 case NAL_RADL_N:
1844 case NAL_RADL_R:
1845 case NAL_RASL_N:
1846 case NAL_RASL_R:
1847 if (s->pps && s->pps->slice_header_extension_present_flag) {
1848 h265d_dxva2_picture_context_t *temp = (h265d_dxva2_picture_context_t *)s->hal_pic_private;
1849 temp->slice_cut_param[slice_cnt].start_bit = s->start_bit;
1850 temp->slice_cut_param[slice_cnt].end_bit = s->end_bit;
1851 temp->slice_cut_param[slice_cnt].is_enable = 1;
1852 break;
1853 }
1854 default: break;
1855 }
1856 slice_cnt++;
1857 }
1858 }
1859 fail:
1860 return ret;
1861 }
1862
static RK_U16 U16_AT(const RK_U8 *ptr)
1864 {
1865 return ptr[0] << 8 | ptr[1];
1866 }
1867
static RK_S32 hevc_parser_extradata(HEVCContext *s)
1869 {
1870 H265dContext_t *h265dctx = s->h265dctx;
1871 RK_S32 ret = MPP_SUCCESS;
1872 if (h265dctx->extradata_size > 3 &&
1873 (h265dctx->extradata[0] || h265dctx->extradata[1] ||
1874 h265dctx->extradata[2] > 1)) {
1875 /* It seems the extradata is encoded as hvcC format.
1876 * Temporarily, we support configurationVersion==0 until 14496-15 3rd
1877 * is finalized. When finalized, configurationVersion will be 1 and we
1878 * can recognize hvcC by checking if h265dctx->extradata[0]==1 or not. */
1879 const RK_U8 *ptr = (const RK_U8 *)h265dctx->extradata;
1880 RK_U32 size = h265dctx->extradata_size;
1881 RK_U32 numofArrays = 0, numofNals = 0;
1882 RK_U32 j = 0, i = 0;
1883 if (size < 7) {
1884 return MPP_NOK;
1885 }
1886
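/*
 * hvcC layout (ISO/IEC 14496-15 HEVCDecoderConfigurationRecord): byte 21
 * carries lengthSizeMinusOne in its two low bits, byte 22 is numOfArrays,
 * and each array is <1 byte NAL type><2 byte count><count NAL units>, with
 * every unit prefixed by a 2-byte size.
 */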
1887 mpp_log("extradata is encoded as hvcC format");
1888 s->is_nalff = 1;
1889 s->nal_length_size = 1 + (ptr[14 + 7] & 3);
1890 ptr += 22;
1891 size -= 22;
1892 numofArrays = (char)ptr[0];
1893 ptr += 1;
1894 size -= 1;
1895 for (i = 0; i < numofArrays; i++) {
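            /* skip the array_completeness(1)/reserved(1)/NAL_unit_type(6) byte */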
1896 ptr += 1;
1897 size -= 1;
1898 // Num of nals
1899 numofNals = U16_AT(ptr);
1900 ptr += 2;
1901 size -= 2;
1902
1903 for (j = 0; j < numofNals; j++) {
1904 RK_U32 length = 0;
1905 if (size < 2) {
1906 return MPP_NOK;
1907 }
1908
1909 length = U16_AT(ptr);
1910
1911 ptr += 2;
1912 size -= 2;
1913 if (size < length) {
1914 return MPP_NOK;
1915 }
1916 parser_nal_unit(s, ptr, length);
1917 ptr += length;
1918 size -= length;
1919 }
1920 }
1921 } else {
1922 s->is_nalff = 0;
1923 ret = split_nal_units(s, h265dctx->extradata, h265dctx->extradata_size);
1924 if (ret < 0)
1925 return ret;
1926 ret = parser_nal_units(s);
1927 if (ret < 0)
1928 return ret;
1929 }
1930 return ret;
1931 }
1932
1933 MPP_RET h265d_prepare(void *ctx, MppPacket pkt, HalDecTask *task)
1934 {
1935
1936 MPP_RET ret = MPP_OK;
1937 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
1938 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
1939 SplitContext_t *sc = (SplitContext_t*)h265dctx->split_cxt;
1940 RK_S64 pts = -1, dts = -1;
1941 RK_U8 *buf = NULL;
1942 void *pos = NULL;
1943 RK_S32 length = 0;
1944
1945 task->valid = 0;
1946 s->eos = mpp_packet_get_eos(pkt);
1947
1948 if (sc != NULL) {
1949 sc->eos = s->eos;
1950 } else if (h265dctx->cfg->base.split_parse) {
1951 h265d_split_init((void**)&sc);
1952 if (sc == NULL) {
1953             mpp_err("split context malloc fail");
1954 return MPP_ERR_NOMEM;
1955 }
1956 h265dctx->split_cxt = sc;
1957 }
1958
1959 buf = (RK_U8 *)mpp_packet_get_pos(pkt);
1960 pts = mpp_packet_get_pts(pkt);
1961 dts = mpp_packet_get_dts(pkt);
1962 h265d_dbg(H265D_DBG_TIME, "prepare get pts %lld", pts);
1963 length = (RK_S32)mpp_packet_get_length(pkt);
1964
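    /* An extradata packet carries out-of-band parameter sets (hvcC or Annex-B).
     * Parse them now; if they contain no frame data, consume the packet and return. */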
1965 if (mpp_packet_get_flag(pkt) & MPP_PACKET_FLAG_EXTRA_DATA) {
1966
1967 h265dctx->extradata_size = length;
1968 h265dctx->extradata = buf;
1969 s->extra_has_frame = 0;
1970 s->task = NULL;
1971 hevc_parser_extradata(s);
1972 if (!s->extra_has_frame) {
1973 pos = buf + length;
1974 mpp_packet_set_pos(pkt, pos);
1975 return MPP_OK;
1976 }
1977 }
1978
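    /* Split parsing on an Annex-B stream: let the splitter assemble one complete frame
     * from the byte stream. hvcC (is_nalff) input is already framed, so it skips this path. */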
1979 if (h265dctx->cfg->base.split_parse && !s->is_nalff) {
1980 RK_S32 consume = 0;
1981 RK_U8 *split_out_buf = NULL;
1982 RK_S32 split_size = 0;
1983
1984 consume = h265d_split_frame(h265dctx->split_cxt, (const RK_U8**)&split_out_buf, &split_size,
1985 (const RK_U8*)buf, length, pts, dts);
1986 pos = buf + consume;
1987 mpp_packet_set_pos(pkt, pos);
1988 if (split_size) {
1989 buf = split_out_buf;
1990 length = split_size;
1991 s->checksum_buf = buf; //check with openhevc
1992 s->checksum_buf_size = split_size;
1993 h265d_dbg(H265D_DBG_TIME, "split frame get pts %lld", sc->pts);
1994 s->pts = sc->pts;
1995 s->dts = sc->dts;
1996 s->eos = (s->eos && (mpp_packet_get_length(pkt) < 4)) ? 1 : 0;
1997 } else {
1998 return MPP_FAIL_SPLIT_FRAME;
1999 }
2000 } else {
2001 pos = buf + length;
2002 s->pts = pts;
2003 s->dts = dts;
2004 mpp_packet_set_pos(pkt, pos);
2005 if (s->eos && !length) {
2006 task->valid = 0;
2007 task->flags.eos = 1;
2008 h265d_flush(ctx);
2009 return ret;
2010 }
2011 }
2012 #ifdef dump
2013 if (s->nb_frame < 10 && fp != NULL) {
2014 fwrite(buf, 1, length, fp);
2015 }
2016 #endif
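    /* Split the frame buffer into NAL units and pre-fill the hal slice syntax
     * before marking the task valid */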
2017 ret = (MPP_RET)split_nal_units(s, buf, length);
2018
2019 if (MPP_OK == ret) {
2020 if (MPP_OK == h265d_syntax_fill_slice(s->h265dctx, task->input)) {
2021 task->valid = 1;
2022 task->input_packet = s->input_packet;
2023 }
2024 }
2025 return ret;
2026
2027 }
2028
2029 MPP_RET h265d_get_stream(void *ctx, RK_U8 **buf, RK_S32 *size)
2030 {
2031 MPP_RET ret = MPP_OK;
2032 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2033 HEVCContext *s = h265dctx->priv_data;
2034 *buf = s->checksum_buf;
2035 *size = s->checksum_buf_size;
2036 return ret;
2037 }
2038
2039 MPP_RET h265d_set_compare_info(void *ctx, void *info)
2040 {
2041 MPP_RET ret = MPP_OK;
2042 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2043 h265dctx->compare_info = info;
2044 return ret;
2045 }
2046
2047
2048 MPP_RET h265d_parse(void *ctx, HalDecTask *task)
2049 {
2050 MPP_RET ret;
2051 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2052 HEVCContext *s = h265dctx->priv_data;
2053
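    /* Parse the NAL units split out in h265d_prepare(); a stream error is downgraded
     * to a parse_err flag on the task instead of failing the whole call */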
2054 task->valid = 0;
2055 s->got_frame = 0;
2056 s->task = task;
2057 s->ref = NULL;
2058 ret = parser_nal_units(s);
2059 if (ret < 0) {
2060 if (ret == MPP_ERR_STREAM) {
2061             mpp_log("current stream is not right, skip it %p\n", s->ref);
2062 ret = 0;
2063 }
2064 // return ret;
2065 task->flags.parse_err = 1;
2066 }
2067 h265d_dbg(H265D_DBG_GLOBAL, "decode poc = %d", s->poc);
2068 if (s->ref) {
2069 if (!task->flags.parse_err)
2070 h265d_parser2_syntax(h265dctx);
2071
2072 s->task->syntax.data = s->hal_pic_private;
2073 s->task->syntax.number = 1;
2074 s->task->valid = 1;
2075 s->ps_need_upate = 0;
2076 s->rps_need_upate = 0;
2077 }
2078 if (s->eos) {
2079 h265d_flush(ctx);
2080 s->task->flags.eos = 1;
2081 }
2082 s->nb_frame++;
2083 if (s->is_decoded) {
2084 h265d_dbg(H265D_DBG_GLOBAL, "Decoded frame with POC %d.\n", s->poc);
2085 s->is_decoded = 0;
2086 }
2087 mpp_hevc_output_frame(ctx, 0);
2088 return MPP_OK;
2089 }
2090
2091 MPP_RET h265d_deinit(void *ctx)
2092 {
2093 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2094 HEVCContext *s = h265dctx->priv_data;
2095 SplitContext_t *sc = h265dctx->split_cxt;
2096 RK_U8 *buf = NULL;
2097 int i;
2098
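    /* Release DPB frames, VPS/SPS/PPS lists, NAL buffers, the hal picture context
     * and the input packet */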
2099 for (i = 0; i < MAX_DPB_SIZE; i++) {
2100 mpp_hevc_unref_frame(s, &s->DPB[i], ~0);
2101 mpp_frame_deinit(&s->DPB[i].frame);
2102 }
2103
2104 for (i = 0; i < MAX_VPS_COUNT; i++) {
2105 if (s->vps_list[i])
2106 mpp_mem_pool_put_f(s->vps_pool, s->vps_list[i]);
2107 }
2108 for (i = 0; i < MAX_SPS_COUNT; i++) {
2109 if (s->sps_list[i])
2110 mpp_mem_pool_put_f(s->sps_pool, s->sps_list[i]);
2111 }
2112 for (i = 0; i < MAX_PPS_COUNT; i++)
2113 mpp_hevc_pps_free(s->pps_list[i]);
2114
2115 mpp_free(s->HEVClc);
2116
2117 s->HEVClc = NULL;
2118
2119 for (i = 0; i < s->nals_allocated; i++)
2120 mpp_free(s->nals[i].rbsp_buffer);
2121
2122 if (s->nals) {
2123 mpp_free(s->nals);
2124 }
2125
2126 MPP_FREE(s->pre_pps_data);
2127
2128 s->nals_allocated = 0;
2129
2130 if (s->hal_pic_private) {
2131 h265d_dxva2_picture_context_t *ctx_pic = (h265d_dxva2_picture_context_t *)s->hal_pic_private;
2132 MPP_FREE(ctx_pic->slice_short);
2133 MPP_FREE(ctx_pic->slice_cut_param);
2134 mpp_free(s->hal_pic_private);
2135 }
2136 if (s->input_packet) {
2137 buf = mpp_packet_get_data(s->input_packet);
2138 mpp_free(buf);
2139 mpp_packet_deinit(&s->input_packet);
2140 }
2141
2142 if (s->vps_pool)
2143 mpp_mem_pool_deinit_f(s->vps_pool);
2144 if (s->sps_pool)
2145 mpp_mem_pool_deinit_f(s->sps_pool);
2146
2147     MPP_FREE(s->hdr_dynamic_meta);
2148
2149 if (s) {
2150 mpp_free(s);
2151 }
2152
2153 if (sc) {
2154 h265d_split_deinit(sc);
2155 }
2156 return 0;
2157 }
2158
2159 static RK_S32 hevc_init_context(H265dContext_t *h265dctx)
2160 {
2161 HEVCContext *s = h265dctx->priv_data;
2162 RK_U32 i;
2163
2164 s->h265dctx = h265dctx;
2165
2166 s->HEVClc = (HEVCLocalContext*)mpp_calloc(HEVCLocalContext, 1);
2167 if (!s->HEVClc)
2168 goto fail;
2169
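    /* Initialize the DPB: slot_index 0xff marks an entry with no buffer slot attached,
     * and poc stays at INT_MAX until a picture is stored */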
2170 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
2171 s->DPB[i].slot_index = 0xff;
2172 s->DPB[i].poc = INT_MAX;
2173 s->DPB[i].error_flag = 0;
2174 mpp_frame_init(&s->DPB[i].frame);
2175 if (!s->DPB[i].frame)
2176 goto fail;
2177 }
2178
2179 s->max_ra = INT_MAX;
2180
2181
2182 s->temporal_layer_id = 8;
2183 s->context_initialized = 1;
2184
2185 return 0;
2186
2187 fail:
2188 h265d_deinit(h265dctx);
2189 return MPP_ERR_NOMEM;
2190 }
2191
2192
2193 MPP_RET h265d_init(void *ctx, ParserCfg *parser_cfg)
2194 {
2195
2196 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2197 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
2198 SplitContext_t *sc = (SplitContext_t*)h265dctx->split_cxt;
2199 RK_S32 ret;
2200 RK_U8 *buf = NULL;
2201 RK_S32 size = SZ_512K;
2202 if (s == NULL) {
2203 s = (HEVCContext*)mpp_calloc(HEVCContext, 1);
2204 if (s == NULL) {
2205             mpp_err("hevc context malloc fail");
2206 return MPP_ERR_NOMEM;
2207 }
2208 h265dctx->priv_data = s;
2209 }
2210
2211 s->first_nal_type = NAL_INIT_VALUE;
2212 h265dctx->cfg = parser_cfg->cfg;
2213
2214 if (sc == NULL && h265dctx->cfg->base.split_parse) {
2215 h265d_split_init((void**)&sc);
2216 if (sc == NULL) {
2217             mpp_err("split context malloc fail");
2218 return MPP_ERR_NOMEM;
2219 }
2220 h265dctx->split_cxt = sc;
2221 }
2222
2223 // mpp_env_set_u32("h265d_debug", H265D_DBG_REF);
2224 mpp_env_get_u32("h265d_debug", &h265d_debug, 0);
2225
2226 ret = hevc_init_context(h265dctx);
2227
2228 s->hal_pic_private = mpp_calloc_size(void, sizeof(h265d_dxva2_picture_context_t));
2229
2230 if (s->hal_pic_private) {
2231 h265d_dxva2_picture_context_t *ctx_pic = (h265d_dxva2_picture_context_t *)s->hal_pic_private;
2232 ctx_pic->slice_short = (DXVA_Slice_HEVC_Short *)mpp_malloc(DXVA_Slice_HEVC_Short, MAX_SLICES);
2233
2234 if (!ctx_pic->slice_short)
2235 return MPP_ERR_NOMEM;
2236
2237 ctx_pic->slice_cut_param = (DXVA_Slice_HEVC_Cut_Param *)mpp_malloc(DXVA_Slice_HEVC_Cut_Param, MAX_SLICES);
2238 if (!ctx_pic->slice_cut_param)
2239 return MPP_ERR_NOMEM;
2240 ctx_pic->max_slice_num = MAX_SLICES;
2241 } else {
2242 return MPP_ERR_NOMEM;
2243 }
2244
2245 if (ret < 0)
2246 return ret;
2247
2248 s->picture_struct = 0;
2249
2250 s->slots = parser_cfg->frame_slots;
2251
2252 s->packet_slots = parser_cfg->packet_slots;
2253
2254 if (h265dctx->extradata_size > 0 && h265dctx->extradata) {
2255 ret = hevc_parser_extradata(s);
2256 if (ret < 0) {
2257 h265d_deinit(h265dctx);
2258 return ret;
2259 }
2260 }
2261
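    /* Pre-allocate a 512 KB input packet; h265d_prepare() attaches it to each decode task */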
2262 buf = mpp_malloc(RK_U8, size);
2263
2264 if (buf == NULL) {
2265 return MPP_ERR_NOMEM;
2266 }
2267
2268 if (MPP_OK != mpp_packet_init(&s->input_packet, (void*)buf, size)) {
2269 return MPP_ERR_NOMEM;
2270 }
2271 mpp_buf_slot_setup(s->slots, 25);
2272
2273 s->h265dctx->hw_info = parser_cfg->hw_info;
2274
2275 s->pre_pps_id = -1;
2276
2277     s->vps_pool = mpp_mem_pool_init_f("h265d_vps", sizeof(HEVCVPS));
2278 s->sps_pool = mpp_mem_pool_init_f("h265d_sps", sizeof(HEVCSPS));
2279
2280 mpp_slots_set_prop(s->slots, SLOTS_WIDTH_ALIGN, rkv_ctu_64_align);
2281
2282 #ifdef dump
2283 fp = fopen("/data/dump1.bin", "wb+");
2284 #endif
2285 return 0;
2286 }
2287
2288 MPP_RET h265d_flush(void *ctx)
2289 {
2290 RK_S32 ret = 0;
2291 do {
2292 ret = mpp_hevc_output_frame(ctx, 1);
2293 } while (ret);
2294 return MPP_OK;
2295 }
2296
2297 MPP_RET h265d_reset(void *ctx)
2298 {
2299 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2300 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
2301 RK_S32 ret = 0;
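    /* Drain all pending frames from the DPB, then drop references and reset the splitter */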
2302 do {
2303 ret = mpp_hevc_output_frame(ctx, 1);
2304 } while (ret);
2305 mpp_hevc_flush_dpb(s);
2306 h265d_split_reset(h265dctx->split_cxt);
2307 s->max_ra = INT_MAX;
2308 s->eos = 0;
2309 s->first_i_fast_play = 0;
2310 return MPP_OK;
2311 }
2312
2313 MPP_RET h265d_control(void *ctx, MpiCmd cmd, void *param)
2314 {
2315 (void) ctx;
2316 (void) cmd;
2317 (void) param;
2318 return MPP_OK;
2319 }
2320
2321 MPP_RET h265d_callback(void *ctx, void *err_info)
2322 {
2323 H265dContext_t *h265dctx = (H265dContext_t *)ctx;
2324 HalDecTask *task_dec = (HalDecTask *)err_info;
2325 HEVCContext *s = (HEVCContext *)h265dctx->priv_data;
2326
2327 if (!h265dctx->cfg->base.disable_error) {
2328 MppFrame frame = NULL;
2329 RK_U32 i = 0;
2330
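        /* The hal reported a decode error on this task: mark the output frame and any
         * matching DPB entry; an errored IRAP also resets max_ra (presumably so dependent
         * RASL pictures are discarded again) */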
2331 if (s->first_nal_type >= 16 && s->first_nal_type <= 23) {
2332             mpp_log("error found on IRAP frame");
2333 s->max_ra = INT_MAX;
2334 }
2335 // s->miss_ref_flag = 1;
2336 mpp_buf_slot_get_prop(s->slots, task_dec->output, SLOT_FRAME_PTR, &frame);
2337 mpp_frame_set_errinfo(frame, MPP_FRAME_ERR_UNKNOW);
2338 h265d_dbg(H265D_DBG_REF, "set decoded frame error, poc %d, slot %d\n",
2339 mpp_frame_get_poc(frame), task_dec->output);
2340
2341 for (i = 0; i < MPP_ARRAY_ELEMS(s->DPB); i++) {
2342 if (s->DPB[i].slot_index == task_dec->output) {
2343 s->DPB[i].error_flag = 1;
2344 h265d_dbg(H265D_DBG_REF, "Mark dpb[%d] poc %d, slot_idx %d, err %d, frame: err %d, dis %d\n",
2345 i, mpp_frame_get_poc(s->DPB[i].frame), s->DPB[i].slot_index, s->DPB[i].error_flag,
2346 mpp_frame_get_errinfo(s->DPB[i].frame), mpp_frame_get_discard(s->DPB[i].frame));
2347 }
2348 }
2349 }
2350
2351 if (!task_dec->flags.parse_err) {
2352 s->ps_need_upate = 0;
2353 s->rps_need_upate = 0;
2354 }
2355
2356 (void) err_info;
2357
2358 return MPP_OK;
2359 }
2360
2361 const ParserApi api_h265d_parser = {
2362 .name = "h265d_parse",
2363 .coding = MPP_VIDEO_CodingHEVC,
2364 .ctx_size = sizeof(H265dContext_t),
2365 .flag = 0,
2366 .init = h265d_init,
2367 .deinit = h265d_deinit,
2368 .prepare = h265d_prepare,
2369 .parse = h265d_parse,
2370 .reset = h265d_reset,
2371 .flush = h265d_flush,
2372 .control = h265d_control,
2373 .callback = h265d_callback,
2374 };
2375
2376