1 /*
2 * Copyright 2021 Rockchip Electronics Co. LTD
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #define MODULE_TAG "av1d_cbs"
17
18 #include <string.h>
19
20 #include "mpp_mem.h"
21 #include "mpp_debug.h"
22 #include "mpp_bitread.h"
23 #include "mpp_bitwrite.h"
24 #include "rk_hdr_meta_com.h"
25
26 #include "av1d_parser.h"
27
28 #ifndef UINT32_MAX
29 #define UINT32_MAX 0xFFFFFFFF
30 #endif
31
32 #ifndef INT_MAX
33 #define INT_MAX 2147483647 /* maximum (signed) int value */
34 #endif
35
36 #define BUFFER_PADDING_SIZE 64
37 #define MAX_UINT_BITS(length) ((UINT64_C(1) << (length)) - 1)
38 #define MAX_INT_BITS(length) ((INT64_C(1) << ((length) - 1)) - 1)
39 #define MIN_INT_BITS(length) (-(INT64_C(1) << ((length) - 1)))
40
41 /**
42 * Clip a signed integer into the 0,(2^p-1) range.
43 * @param a value to clip
44 * @param p bit position to clip at
45 * @return clipped value
46 */
47 static RK_U32 mpp_clip_uintp2(RK_S32 a, RK_S32 p)
48 {
49 if (a & ~((1 << p) - 1)) return -a >> 31 & ((1 << p) - 1);
50 else return a;
51 }
52
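/*
 * uvlc(): variable length unsigned integer, read here via the generic
 * ue(v) reader and then checked against [range_min, range_max].
 */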
53 static RK_S32 mpp_av1_read_uvlc(BitReadCtx_t *gbc, const char *name, RK_U32 *write_to,
54 RK_U32 range_min, RK_U32 range_max)
55 {
56 RK_U32 value;
57
58 mpp_read_ue(gbc, &value);
59
60 if (value < range_min || value > range_max) {
61 mpp_err_f("%s out of range: "
62 "%d, but must be in [%d,%d].\n",
63 name, value, range_min, range_max);
64 return MPP_NOK;
65 }
66 *write_to = value;
67 return MPP_OK;
68 }
69
70
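/*
 * leb128(): little-endian base-128 integer, at most 8 bytes, 7 payload
 * bits per byte with the top bit as continuation flag. The decoded
 * value must still fit in 32 bits.
 */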
71 static RK_S32 mpp_av1_read_leb128(BitReadCtx_t *gbc, RK_U64 *write_to)
72 {
73 RK_U64 value;
74 RK_S32 err = 0, i;
75
76 value = 0;
77 for (i = 0; i < 8; i++) {
78 RK_U32 byte;
79
80 READ_BITS(gbc, 8, &byte);
81
82 if (err < 0)
83 return err;
84
85 value |= (RK_U64)(byte & 0x7f) << (i * 7);
86 if (!(byte & 0x80))
87 break;
88 }
89
90 if (value > UINT32_MAX)
91 return MPP_NOK;
92
93
94 *write_to = value;
95 return MPP_OK;
96
97 __bitread_error:
98 return MPP_NOK;
99
100 }
101
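/*
 * ns(n): non-symmetric unsigned value in [0, n - 1]. Small values use
 * w - 1 bits, larger ones use w bits plus an extra low bit, where
 * w = floor(log2(n)) + 1 and m = (1 << w) - n.
 */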
102 static RK_S32 mpp_av1_read_ns(BitReadCtx_t *gbc, const char *name,
103 RK_U32 n, RK_U32 *write_to)
104 {
105 RK_U32 m, v, extra_bit, value;
106 RK_S32 w;
107
108 w = mpp_log2(n) + 1;
109 m = (1 << w) - n;
110
111 if (mpp_get_bits_left(gbc) < w) {
112 mpp_err_f("Invalid non-symmetric value at "
113 "%s: bitstream ended.\n", name);
114 return MPP_NOK;
115 }
116 if (w - 1 > 0)
117 READ_BITS(gbc, w - 1, &v);
118 else
119 v = 0;
120
121 if (v < m) {
122 value = v;
123 } else {
124 READ_ONEBIT(gbc, &extra_bit);
125 value = (v << 1) - m + extra_bit;
126 }
127
128 *write_to = value;
129 return MPP_OK;
130
131 __bitread_error:
132 return MPP_NOK;
133
134 }
135
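/*
 * Unary increment: '1' bits are read and counted starting from
 * range_min until a '0' bit is seen or range_max is reached.
 */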
136 static RK_S32 mpp_av1_read_increment(BitReadCtx_t *gbc, RK_U32 range_min,
137 RK_U32 range_max, const char *name,
138 RK_U32 *write_to)
139 {
140 RK_U32 value;
141 RK_S32 i;
142 RK_S8 bits[33];
143
144 mpp_assert(range_min <= range_max && range_max - range_min < sizeof(bits) - 1);
145
146 for (i = 0, value = range_min; value < range_max;) {
147 RK_U8 tmp = 0;
148 if (mpp_get_bits_left(gbc) < 1) {
149 mpp_err_f("Invalid increment value at "
150 "%s: bitstream ended.\n", name);
151 return MPP_NOK;
152 }
153 READ_ONEBIT(gbc, &tmp);
154 if (tmp) {
155 bits[i++] = '1';
156 ++value;
157 } else {
158 bits[i++] = '0';
159 break;
160 }
161 }
162 *write_to = value;
163 return MPP_OK;
164
165 __bitread_error:
166 return MPP_NOK;
167 }
168
169 RK_S32 mpp_av1_read_unsigned(BitReadCtx_t *gbc,
170 RK_S32 width, const char *name,
171 RK_U32 *write_to, RK_U32 range_min,
172 RK_U32 range_max)
173 {
174 RK_U32 value;
175
176 mpp_assert(width > 0 && width <= 32);
177
178 if (mpp_get_bits_left(gbc) < width) {
179 mpp_err_f("Invalid value at "
180 "%s: bitstream ended.\n", name);
181 return MPP_NOK;
182 }
183
184 READ_BITS(gbc, width, &value);
185
186 if (value < range_min || value > range_max) {
187 mpp_err_f("%s out of range: "
188 "%d, but must be in [%d,%d].\n",
189 name, value, range_min, range_max);
190 return MPP_NOK;
191 }
192
193 *write_to = value;
194 return 0;
195
196 __bitread_error:
197 return MPP_NOK;
198
199 }
200
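/* Sign-extend the low 'bits' bits of val to a full 32-bit signed value. */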
201 static RK_S32 sign_extend(RK_S32 val, RK_U8 bits)
202 {
203 RK_U32 shift = 8 * sizeof(RK_S32) - bits;
204 union { RK_U32 u; RK_S32 s; } v = { (RK_U32) val << shift };
205 return v.s >> shift;
206 }
207
208 RK_S32 mpp_av1_read_signed(BitReadCtx_t *gbc,
209 RK_S32 width, const char *name,
210 RK_S32 *write_to, RK_S32 range_min,
211 RK_S32 range_max)
212 {
213 RK_S32 value;
214
215 mpp_assert(width > 0 && width <= 32);
216
217 if (mpp_get_bits_left(gbc) < width) {
218 mpp_err_f("Invalid value at "
219 "%s: bitstream ended.\n", name);
220 return MPP_NOK;
221 }
222
223 READ_BITS(gbc, width, &value);
224 value = sign_extend(value, width);
225 if (value < range_min || value > range_max) {
226 mpp_err_f("%s out of range: "
227 "%d, but must be in [%d,%d].\n",
228 name, value, range_min, range_max);
229 return MPP_NOK;
230 }
231
232 *write_to = value;
233 return 0;
234
235 __bitread_error:
236 return MPP_NOK;
237
238 }
239
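/*
 * Subexponential coding (used for global motion parameters): an
 * increment selects the bucket, then either a fixed-width remainder or
 * a final ns() value is read and offset by the bucket base.
 */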
240 static RK_S32 mpp_av1_read_subexp(BitReadCtx_t *gbc,
241 RK_U32 range_max, RK_U32 *write_to)
242 {
243 RK_U32 value;
244 RK_S32 err;
245 RK_U32 max_len, len, range_offset, range_bits;
246
247 max_len = mpp_log2(range_max - 1) - 3;
248
249 err = mpp_av1_read_increment(gbc, 0, max_len, "subexp_more_bits", &len);
250 if (err < 0)
251 return err;
252
253 if (len) {
254 range_bits = 2 + len;
255 range_offset = 1 << range_bits;
256 } else {
257 range_bits = 3;
258 range_offset = 0;
259 }
260
261 if (len < max_len) {
262 err = mpp_av1_read_unsigned(gbc, range_bits,
263 "subexp_bits", &value,
264 0, MAX_UINT_BITS(range_bits));
265 if (err < 0)
266 return err;
267
268 } else {
269 err = mpp_av1_read_ns(gbc, "subexp_final_bits", range_max - range_offset,
270 &value);
271 if (err < 0)
272 return err;
273 }
274 value += range_offset;
275
276 *write_to = value;
277 return err;
278 }
279
280
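/* Return the smallest k such that (blksize << k) >= target. */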
281 static RK_S32 mpp_av1_tile_log2(RK_S32 blksize, RK_S32 target)
282 {
283 RK_S32 k;
284 for (k = 0; (blksize << k) < target; k++);
285 return k;
286 }
287
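/*
 * Signed distance between two order hints, wrapped into the valid
 * order hint range; returns 0 when order hints are disabled.
 */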
288 static RK_S32 mpp_av1_get_relative_dist(const AV1RawSequenceHeader *seq,
289 RK_U32 a, RK_U32 b)
290 {
291 RK_U32 diff, m;
292 if (!seq->enable_order_hint)
293 return 0;
294 diff = a - b;
295 m = 1 << seq->order_hint_bits_minus_1;
296 diff = (diff & (m - 1)) - (diff & m);
297 return diff;
298 }
299
300 static size_t mpp_av1_get_payload_bytes_left(BitReadCtx_t *gbc)
301 {
302 size_t size = 0;
303 RK_U8 value = 0;
304 RK_S32 i = 0;
305
306 for (i = 0; mpp_get_bits_left(gbc) >= 8; i++) {
307 READ_BITS(gbc, 8, &value);
308 if (value)
309 size = i;
310 }
311 return size;
312
313 __bitread_error:
314 return MPP_NOK;
315
316 }
317
318 #define CHECK(call) do { \
319 err = (call); \
320 if (err < 0) \
321 return err; \
322 } while (0)
323
324
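/*
 * Shorthand readers used by the syntax parsers below:
 *   fb/fbs  - fixed-width unsigned field (optionally subscripted)
 *   fc/fcs  - fixed-width unsigned field with an explicit value range
 *   flag(s) - single one-bit flag
 *   su/sus  - fixed-width signed field
 *   infer   - assign a value that is not present in the bitstream
 */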
325 #define SUBSCRIPTS(subs, ...) (subs > 0 ? ((RK_S32[subs + 1]){ subs, __VA_ARGS__ }) : NULL)
326 #define fb(width, name) \
327 xf(width, name, current->name, 0, MAX_UINT_BITS(width), 0, )
328 #define fc(width, name, range_min, range_max) \
329 xf(width, name, current->name, range_min, range_max, 0, )
330 #define flag(name) fb(1, name)
331 #define su(width, name) \
332 xsu(width, name, current->name, 0, )
333
334 #define fbs(width, name, subs, ...) \
335 xf(width, name, current->name, 0, MAX_UINT_BITS(width), subs, __VA_ARGS__)
336 #define fcs(width, name, range_min, range_max, subs, ...) \
337 xf(width, name, current->name, range_min, range_max, subs, __VA_ARGS__)
338 #define flags(name, subs, ...) \
339 xf(1, name, current->name, 0, 1, subs, __VA_ARGS__)
340 #define sus(width, name, subs, ...) \
341 xsu(width, name, current->name, subs, __VA_ARGS__)
342
343 #define xf(width, name, var, range_min, range_max, subs, ...) do { \
344 RK_U32 value; \
345 CHECK(mpp_av1_read_unsigned(gb, width, #name, \
346 &value, range_min, range_max)); \
347 var = value; \
348 } while (0)
349
350 #define xsu(width, name, var, subs, ...) do { \
351 RK_S32 value; \
352 CHECK(mpp_av1_read_signed(gb, width, #name, \
353 &value, \
354 MIN_INT_BITS(width), \
355 MAX_INT_BITS(width))); \
356 var = value; \
357 } while (0)
358
359 #define uvlc(name, range_min, range_max) do { \
360 RK_U32 value; \
361 CHECK(mpp_av1_read_uvlc(gb, #name, \
362 &value, range_min, range_max)); \
363 current->name = value; \
364 } while (0)
365
366 #define ns(max_value, name) do { \
367 RK_U32 value; \
368 CHECK(mpp_av1_read_ns(gb, #name, max_value, \
369 &value)); \
370 current->name = value; \
371 } while (0)
372
373 #define increment(name, min, max) do { \
374 RK_U32 value; \
375 CHECK(mpp_av1_read_increment(gb, min, max, #name, &value)); \
376 current->name = value; \
377 } while (0)
378
379 #define subexp(name, max) do { \
380 RK_U32 value = 0; \
381 CHECK(mpp_av1_read_subexp(gb, max, \
382 &value)); \
383 current->name = value; \
384 } while (0)
385
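/* delta_q(): one delta_coded flag followed by a 7-bit signed delta when set. */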
386 #define delta_q(name) do { \
387 RK_U8 delta_coded; \
388 RK_S8 delta_q; \
389 xf(1, name.delta_coded, delta_coded, 0, 1, 0, ); \
390 if (delta_coded) \
391 xsu(1 + 6, name.delta_q, delta_q, 0, ); \
392 else \
393 delta_q = 0; \
394 current->name = delta_q; \
395 } while (0)
396
397 #define leb128(name) do { \
398 RK_U64 value; \
399 CHECK(mpp_av1_read_leb128(gb, &value)); \
400 current->name = value; \
401 } while (0)
402
403 #define infer(name, value) do { \
404 current->name = value; \
405 } while (0)
406
407 #define byte_alignment(gb) (mpp_get_bits_count(gb) % 8)
408
409 static RK_S32 mpp_av1_read_obu_header(AV1Context *ctx, BitReadCtx_t *gb,
410 AV1RawOBUHeader *current)
411 {
412 RK_S32 err;
413
414 fc(1, obu_forbidden_bit, 0, 0);
415
416 fc(4, obu_type, 0, AV1_OBU_PADDING);
417 flag(obu_extension_flag);
418 flag(obu_has_size_field);
419
420 fc(1, obu_reserved_1bit, 0, 0);
421
422 if (current->obu_extension_flag) {
423 fb(3, temporal_id);
424 fb(2, spatial_id);
425 fc(3, extension_header_reserved_3bits, 0, 0);
426 } else {
427 infer(temporal_id, 0);
428 infer(spatial_id, 0);
429 }
430
431 ctx->temporal_id = current->temporal_id;
432 ctx->spatial_id = current->spatial_id;
433
434 return 0;
435 }
436
437 static RK_S32 mpp_av1_trailing_bits(AV1Context *ctx, BitReadCtx_t *gb, RK_S32 nb_bits)
438 {
439 (void)ctx;
440 mpp_assert(nb_bits > 0);
441
442 // fixed(1, trailing_one_bit, 1);
443 mpp_skip_bits(gb, 1);
444
445 --nb_bits;
446
447 while (nb_bits > 0) {
448 // fixed(1, trailing_zero_bit, 0);
449 mpp_skip_bits(gb, 1);
450 --nb_bits;
451 }
452
453 return 0;
454 }
455
456 static RK_S32 mpp_av1_byte_alignment(AV1Context *ctx, BitReadCtx_t *gb)
457 {
458
459 (void)ctx;
460
461 while (byte_alignment(gb) != 0)
462 mpp_skip_bits(gb, 1);
463 //fixed(1, zero_bit, 0);
464
465 return 0;
466 }
467
468 static RK_S32 mpp_av1_color_config(AV1Context *ctx, BitReadCtx_t *gb,
469 AV1RawColorConfig *current, RK_S32 seq_profile)
470 {
471 RK_S32 err;
472
473 flag(high_bitdepth);
474
475 if (seq_profile == PROFILE_AV1_PROFESSIONAL &&
476 current->high_bitdepth) {
477 flag(twelve_bit);
478 ctx->bit_depth = current->twelve_bit ? 12 : 10;
479 } else {
480 ctx->bit_depth = current->high_bitdepth ? 10 : 8;
481 }
482
483 if (seq_profile == PROFILE_AV1_HIGH)
484 infer(mono_chrome, 0);
485 else
486 flag(mono_chrome);
487 ctx->num_planes = current->mono_chrome ? 1 : 3;
488
489 flag(color_description_present_flag);
490 if (current->color_description_present_flag) {
491 fb(8, color_primaries);
492 fb(8, transfer_characteristics);
493 fb(8, matrix_coefficients);
494 } else {
495 infer(color_primaries, MPP_FRAME_PRI_UNSPECIFIED);
496 infer(transfer_characteristics, MPP_FRAME_TRC_UNSPECIFIED);
497 infer(matrix_coefficients, MPP_FRAME_SPC_UNSPECIFIED);
498 }
499
500 if (current->mono_chrome) {
501 flag(color_range);
502
503 infer(subsampling_x, 1);
504 infer(subsampling_y, 1);
505 infer(chroma_sample_position, AV1_CSP_UNKNOWN);
506 infer(separate_uv_delta_q, 0);
507
508 } else if (current->color_primaries == MPP_FRAME_PRI_BT709 &&
509 current->transfer_characteristics == MPP_FRAME_TRC_IEC61966_2_1 &&
510 current->matrix_coefficients == MPP_FRAME_SPC_RGB) {
511 infer(color_range, 1);
512 infer(subsampling_x, 0);
513 infer(subsampling_y, 0);
514 flag(separate_uv_delta_q);
515
516 } else {
517 flag(color_range);
518
519 if (seq_profile == PROFILE_AV1_MAIN) {
520 infer(subsampling_x, 1);
521 infer(subsampling_y, 1);
522 } else if (seq_profile == PROFILE_AV1_HIGH) {
523 infer(subsampling_x, 0);
524 infer(subsampling_y, 0);
525 } else {
526 if (ctx->bit_depth == 12) {
527 fb(1, subsampling_x);
528 if (current->subsampling_x)
529 fb(1, subsampling_y);
530 else
531 infer(subsampling_y, 0);
532 } else {
533 infer(subsampling_x, 1);
534 infer(subsampling_y, 0);
535 }
536 }
537 if (current->subsampling_x && current->subsampling_y) {
538 fc(2, chroma_sample_position, AV1_CSP_UNKNOWN,
539 AV1_CSP_COLOCATED);
540 }
541
542 flag(separate_uv_delta_q);
543 }
544
545 return 0;
546 }
547
548 static RK_S32 mpp_av1_timing_info(AV1Context *ctx, BitReadCtx_t *gb,
549 AV1RawTimingInfo *current)
550 {
551 (void)ctx;
552 RK_S32 err;
553
554 fc(32, num_units_in_display_tick, 1, MAX_UINT_BITS(32));
555 fc(32, time_scale, 1, MAX_UINT_BITS(32));
556
557 flag(equal_picture_interval);
558 if (current->equal_picture_interval)
559 uvlc(num_ticks_per_picture_minus_1, 0, MAX_UINT_BITS(32) - 1);
560
561 return 0;
562 }
563
564 static RK_S32 mpp_av1_decoder_model_info(AV1Context *ctx, BitReadCtx_t *gb,
565 AV1RawDecoderModelInfo *current)
566 {
567 RK_S32 err;
568 (void)ctx;
569 fb(5, buffer_delay_length_minus_1);
570 fb(32, num_units_in_decoding_tick);
571 fb(5, buffer_removal_time_length_minus_1);
572 fb(5, frame_presentation_time_length_minus_1);
573
574 return 0;
575 }
576
577 static RK_S32 mpp_av1_sequence_header_obu(AV1Context *ctx, BitReadCtx_t *gb,
578 AV1RawSequenceHeader *current)
579 {
580 RK_S32 i, err;
581
582 fc(3, seq_profile, PROFILE_AV1_MAIN,
583 PROFILE_AV1_PROFESSIONAL);
584 flag(still_picture);
585 flag(reduced_still_picture_header);
586
587 if (current->reduced_still_picture_header) {
588 infer(timing_info_present_flag, 0);
589 infer(decoder_model_info_present_flag, 0);
590 infer(initial_display_delay_present_flag, 0);
591 infer(operating_points_cnt_minus_1, 0);
592 infer(operating_point_idc[0], 0);
593
594 fb(5, seq_level_idx[0]);
595
596 infer(seq_tier[0], 0);
597 infer(decoder_model_present_for_this_op[0], 0);
598 infer(initial_display_delay_present_for_this_op[0], 0);
599
600 } else {
601 flag(timing_info_present_flag);
602 if (current->timing_info_present_flag) {
603 CHECK(mpp_av1_timing_info(ctx, gb, &current->timing_info));
604
605 flag(decoder_model_info_present_flag);
606 if (current->decoder_model_info_present_flag) {
607 CHECK(mpp_av1_decoder_model_info
608 (ctx, gb, &current->decoder_model_info));
609 }
610 } else {
611 infer(decoder_model_info_present_flag, 0);
612 }
613
614 flag(initial_display_delay_present_flag);
615
616 fb(5, operating_points_cnt_minus_1);
617 for (i = 0; i <= current->operating_points_cnt_minus_1; i++) {
618 fbs(12, operating_point_idc[i], 1, i);
619 fbs(5, seq_level_idx[i], 1, i);
620
621 if (current->seq_level_idx[i] > 7)
622 flags(seq_tier[i], 1, i);
623 else
624 infer(seq_tier[i], 0);
625
626 if (current->decoder_model_info_present_flag) {
627 flags(decoder_model_present_for_this_op[i], 1, i);
628 if (current->decoder_model_present_for_this_op[i]) {
629 RK_S32 n = current->decoder_model_info.buffer_delay_length_minus_1 + 1;
630 fbs(n, decoder_buffer_delay[i], 1, i);
631 fbs(n, encoder_buffer_delay[i], 1, i);
632 flags(low_delay_mode_flag[i], 1, i);
633 }
634 } else {
635 infer(decoder_model_present_for_this_op[i], 0);
636 }
637
638 if (current->initial_display_delay_present_flag) {
639 flags(initial_display_delay_present_for_this_op[i], 1, i);
640 if (current->initial_display_delay_present_for_this_op[i])
641 fbs(4, initial_display_delay_minus_1[i], 1, i);
642 }
643 }
644 }
645
646 fb(4, frame_width_bits_minus_1);
647 fb(4, frame_height_bits_minus_1);
648
649 fb(current->frame_width_bits_minus_1 + 1, max_frame_width_minus_1);
650 fb(current->frame_height_bits_minus_1 + 1, max_frame_height_minus_1);
651
652 if (current->reduced_still_picture_header)
653 infer(frame_id_numbers_present_flag, 0);
654 else
655 flag(frame_id_numbers_present_flag);
656 if (current->frame_id_numbers_present_flag) {
657 fb(4, delta_frame_id_length_minus_2);
658 fb(3, additional_frame_id_length_minus_1);
659 }
660
661 flag(use_128x128_superblock);
662 flag(enable_filter_intra);
663 flag(enable_intra_edge_filter);
664
665 if (current->reduced_still_picture_header) {
666 infer(enable_interintra_compound, 0);
667 infer(enable_masked_compound, 0);
668 infer(enable_warped_motion, 0);
669 infer(enable_dual_filter, 0);
670 infer(enable_order_hint, 0);
671 infer(enable_jnt_comp, 0);
672 infer(enable_ref_frame_mvs, 0);
673
674 infer(seq_force_screen_content_tools,
675 AV1_SELECT_SCREEN_CONTENT_TOOLS);
676 infer(seq_force_integer_mv,
677 AV1_SELECT_INTEGER_MV);
678 } else {
679 flag(enable_interintra_compound);
680 flag(enable_masked_compound);
681 flag(enable_warped_motion);
682 flag(enable_dual_filter);
683
684 flag(enable_order_hint);
685 if (current->enable_order_hint) {
686 flag(enable_jnt_comp);
687 flag(enable_ref_frame_mvs);
688 } else {
689 infer(enable_jnt_comp, 0);
690 infer(enable_ref_frame_mvs, 0);
691 }
692
693 flag(seq_choose_screen_content_tools);
694 if (current->seq_choose_screen_content_tools)
695 infer(seq_force_screen_content_tools,
696 AV1_SELECT_SCREEN_CONTENT_TOOLS);
697 else
698 fb(1, seq_force_screen_content_tools);
699 if (current->seq_force_screen_content_tools > 0) {
700 flag(seq_choose_integer_mv);
701 if (current->seq_choose_integer_mv)
702 infer(seq_force_integer_mv,
703 AV1_SELECT_INTEGER_MV);
704 else
705 fb(1, seq_force_integer_mv);
706 } else {
707 infer(seq_force_integer_mv, AV1_SELECT_INTEGER_MV);
708 }
709
710 if (current->enable_order_hint)
711 fb(3, order_hint_bits_minus_1);
712 }
713
714 flag(enable_superres);
715 flag(enable_cdef);
716 flag(enable_restoration);
717
718 CHECK(mpp_av1_color_config(ctx, gb, &current->color_config,
719 current->seq_profile));
720
721 flag(film_grain_params_present);
722
723 return 0;
724 }
725
726 static RK_S32 mpp_av1_temporal_delimiter_obu(AV1Context *ctx, BitReadCtx_t *gb)
727 {
728 (void)gb;
729 ctx->seen_frame_header = 0;
730
731 return 0;
732 }
733
734 /* spec 7.8 */
735 static RK_S32 mpp_av1_set_frame_refs(AV1Context *ctx, BitReadCtx_t *gb,
736 AV1RawFrameHeader *current)
737 {
738 (void)gb;
739 const AV1RawSequenceHeader *seq = ctx->sequence_header;
740 static const RK_U8 ref_frame_list[AV1_NUM_REF_FRAMES - 2] = {
741 AV1_REF_FRAME_LAST2, AV1_REF_FRAME_LAST3, AV1_REF_FRAME_BWDREF,
742 AV1_REF_FRAME_ALTREF2, AV1_REF_FRAME_ALTREF
743 };
744 RK_S8 ref_frame_idx[AV1_REFS_PER_FRAME], used_frame[AV1_NUM_REF_FRAMES];
745 RK_S8 shifted_order_hints[AV1_NUM_REF_FRAMES];
746 RK_S32 cur_frame_hint, latest_order_hint, earliest_order_hint, ref;
747 RK_S32 i, j;
748
749 for (i = 0; i < AV1_REFS_PER_FRAME; i++)
750 ref_frame_idx[i] = -1;
751 ref_frame_idx[AV1_REF_FRAME_LAST - AV1_REF_FRAME_LAST] = current->last_frame_idx;
752 ref_frame_idx[AV1_REF_FRAME_GOLDEN - AV1_REF_FRAME_LAST] = current->golden_frame_idx;
753
754 /*
755 * An array usedFrame marking which reference frames
756 * have been used is prepared as follows:
757 */
758 for (i = 0; i < AV1_NUM_REF_FRAMES; i++)
759 used_frame[i] = 0;
760 used_frame[current->last_frame_idx] = 1;
761 used_frame[current->golden_frame_idx] = 1;
762
763 /*
764 * An array shiftedOrderHints (containing the expected output order shifted
765 * such that the current frame has hint equal to curFrameHint) is prepared as follows:
766 */
767 cur_frame_hint = 1 << (seq->order_hint_bits_minus_1);
768 for (i = 0; i < AV1_NUM_REF_FRAMES; i++)
769 shifted_order_hints[i] = cur_frame_hint +
770 mpp_av1_get_relative_dist(seq, ctx->ref_s[i].order_hint,
771 ctx->order_hint);
772
773 latest_order_hint = shifted_order_hints[current->last_frame_idx];
774 earliest_order_hint = shifted_order_hints[current->golden_frame_idx];
775
776 /* find_latest_backward */
777 ref = -1;
778 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
779 RK_S32 hint = shifted_order_hints[i];
780 if (!used_frame[i] && hint >= cur_frame_hint &&
781 (ref < 0 || hint >= latest_order_hint)) {
782 ref = i;
783 latest_order_hint = hint;
784 }
785 }
786 /*
787 * The ALTREF_FRAME reference is set to be a backward reference to the frame
788 * with highest output order as follows:
789 */
790 if (ref >= 0) {
791 ref_frame_idx[AV1_REF_FRAME_ALTREF - AV1_REF_FRAME_LAST] = ref;
792 used_frame[ref] = 1;
793 }
794
795 /* find_earliest_backward */
796 ref = -1;
797 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
798 RK_S32 hint = shifted_order_hints[i];
799 if (!used_frame[i] && hint >= cur_frame_hint &&
800 (ref < 0 || hint < earliest_order_hint)) {
801 ref = i;
802 earliest_order_hint = hint;
803 }
804 }
805 /*
806 * The BWDREF_FRAME reference is set to be a backward reference to
807 * the closest frame as follows:
808 */
809 if (ref >= 0) {
810 ref_frame_idx[AV1_REF_FRAME_BWDREF - AV1_REF_FRAME_LAST] = ref;
811 used_frame[ref] = 1;
812 }
813
814 ref = -1;
815 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
816 RK_S32 hint = shifted_order_hints[i];
817 if (!used_frame[i] && hint >= cur_frame_hint &&
818 (ref < 0 || hint < earliest_order_hint)) {
819 ref = i;
820 earliest_order_hint = hint;
821 }
822 }
823
824 /*
825 * The ALTREF2_FRAME reference is set to the next closest
826 * backward reference as follows:
827 */
828 if (ref >= 0) {
829 ref_frame_idx[AV1_REF_FRAME_ALTREF2 - AV1_REF_FRAME_LAST] = ref;
830 used_frame[ref] = 1;
831 }
832
833 /*
834 * The remaining references are set to be forward references
835 * in anti-chronological order as follows:
836 */
837 for (i = 0; i < AV1_REFS_PER_FRAME - 2; i++) {
838 RK_S32 ref_frame = ref_frame_list[i];
839 if (ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] < 0 ) {
840 /* find_latest_forward */
841 ref = -1;
842 for (j = 0; j < AV1_NUM_REF_FRAMES; j++) {
843 RK_S32 hint = shifted_order_hints[j];
844 if (!used_frame[j] && hint < cur_frame_hint &&
845 (ref < 0 || hint >= latest_order_hint)) {
846 ref = j;
847 latest_order_hint = hint;
848 }
849 }
850 if (ref >= 0) {
851 ref_frame_idx[ref_frame - AV1_REF_FRAME_LAST] = ref;
852 used_frame[ref] = 1;
853 }
854 }
855 }
856
857 /*
858 * Finally, any remaining references are set to the reference
859 * frame with smallest output order as follows:
860 */
861 ref = -1;
862 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
863 RK_S32 hint = shifted_order_hints[i];
864 if (ref < 0 || hint < earliest_order_hint) {
865 ref = i;
866 earliest_order_hint = hint;
867 }
868 }
869 for (i = 0; i < AV1_REFS_PER_FRAME; i++) {
870 if (ref_frame_idx[i] < 0)
871 ref_frame_idx[i] = ref;
872 infer(ref_frame_idx[i], ref_frame_idx[i]);
873 }
874
875 return 0;
876 }
877
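/*
 * superres_params(): derive the superres denominator and the downscaled
 * frame_width from the upscaled width parsed so far.
 */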
878 static RK_S32 mpp_av1_superres_params(AV1Context *ctx, BitReadCtx_t *gb,
879 AV1RawFrameHeader *current)
880 {
881 const AV1RawSequenceHeader *seq = ctx->sequence_header;
882 RK_S32 denom, err;
883
884 if (seq->enable_superres)
885 flag(use_superres);
886 else
887 infer(use_superres, 0);
888
889 if (current->use_superres) {
890 fb(3, coded_denom);
891 denom = current->coded_denom + AV1_SUPERRES_DENOM_MIN;
892 } else {
893 denom = AV1_SUPERRES_NUM;
894 }
895
896 ctx->upscaled_width = ctx->frame_width;
897 ctx->frame_width = (ctx->upscaled_width * AV1_SUPERRES_NUM +
898 denom / 2) / denom;
899 return 0;
900 }
901
902 static RK_S32 mpp_av1_frame_size(AV1Context *ctx, BitReadCtx_t *gb,
903 AV1RawFrameHeader *current)
904 {
905 const AV1RawSequenceHeader *seq = ctx->sequence_header;
906 RK_S32 err;
907
908 if (current->frame_size_override_flag) {
909 fb(seq->frame_width_bits_minus_1 + 1, frame_width_minus_1);
910 fb(seq->frame_height_bits_minus_1 + 1, frame_height_minus_1);
911 } else {
912 infer(frame_width_minus_1, seq->max_frame_width_minus_1);
913 infer(frame_height_minus_1, seq->max_frame_height_minus_1);
914 }
915
916 ctx->frame_width = current->frame_width_minus_1 + 1;
917 ctx->frame_height = current->frame_height_minus_1 + 1;
918
919 CHECK(mpp_av1_superres_params(ctx, gb, current));
920
921 return 0;
922 }
923
924 static RK_S32 mpp_av1_render_size(AV1Context *ctx, BitReadCtx_t *gb,
925 AV1RawFrameHeader *current)
926 {
927 RK_S32 err;
928
929 flag(render_and_frame_size_different);
930
931 if (current->render_and_frame_size_different) {
932 fb(16, render_width_minus_1);
933 fb(16, render_height_minus_1);
934 } else {
935 infer(render_width_minus_1, current->frame_width_minus_1);
936 infer(render_height_minus_1, current->frame_height_minus_1);
937 }
938
939 ctx->render_width = current->render_width_minus_1 + 1;
940 ctx->render_height = current->render_height_minus_1 + 1;
941
942 return 0;
943 }
944
945 static RK_S32 mpp_av1_frame_size_with_refs(AV1Context *ctx, BitReadCtx_t *gb,
946 AV1RawFrameHeader *current)
947 {
948 RK_S32 i, err;
949
950 for (i = 0; i < AV1_REFS_PER_FRAME; i++) {
951 flags(found_ref[i], 1, i);
952 if (current->found_ref[i]) {
953 AV1ReferenceFrameState *ref =
954 &ctx->ref_s[current->ref_frame_idx[i]];
955
956 if (!ref->valid) {
957 mpp_err_f("Missing reference frame needed for frame size "
958 "(ref = %d, ref_frame_idx = %d).\n",
959 i, current->ref_frame_idx[i]);
960 return MPP_ERR_PROTOL;
961 }
962
963 infer(frame_width_minus_1, ref->upscaled_width - 1);
964 infer(frame_height_minus_1, ref->frame_height - 1);
965 infer(render_width_minus_1, ref->render_width - 1);
966 infer(render_height_minus_1, ref->render_height - 1);
967
968 ctx->upscaled_width = ref->upscaled_width;
969 ctx->frame_width = ctx->upscaled_width;
970 ctx->frame_height = ref->frame_height;
971 ctx->render_width = ref->render_width;
972 ctx->render_height = ref->render_height;
973 break;
974 }
975 }
976
977 if (i >= AV1_REFS_PER_FRAME) {
978 CHECK(mpp_av1_frame_size(ctx, gb, current));
979 CHECK(mpp_av1_render_size(ctx, gb, current));
980 } else {
981 CHECK(mpp_av1_superres_params(ctx, gb, current));
982 }
983
984 return 0;
985 }
986
987 static RK_S32 mpp_av1_interpolation_filter(AV1Context *ctx, BitReadCtx_t *gb,
988 AV1RawFrameHeader *current)
989 {
990 RK_S32 err;
991 (void)ctx;
992 flag(is_filter_switchable);
993 if (current->is_filter_switchable)
994 infer(interpolation_filter,
995 AV1_INTERPOLATION_FILTER_SWITCHABLE);
996 else
997 fb(2, interpolation_filter);
998
999 return 0;
1000 }
1001
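/*
 * tile_info(): derive the superblock grid from the frame size, then read
 * either uniform tile spacing (log2 tile counts) or explicit per-tile
 * widths/heights in superblocks.
 */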
1002 static RK_S32 mpp_av1_tile_info(AV1Context *ctx, BitReadCtx_t *gb,
1003 AV1RawFrameHeader *current)
1004 {
1005 const AV1RawSequenceHeader *seq = ctx->sequence_header;
1006 RK_S32 mi_cols, mi_rows, sb_cols, sb_rows, sb_shift, sb_size;
1007 RK_S32 max_tile_width_sb, max_tile_height_sb, max_tile_area_sb;
1008 RK_S32 min_log2_tile_cols, max_log2_tile_cols, max_log2_tile_rows;
1009 RK_S32 min_log2_tiles, min_log2_tile_rows;
1010 RK_S32 i, err;
1011
1012 mi_cols = 2 * ((ctx->frame_width + 7) >> 3);
1013 mi_rows = 2 * ((ctx->frame_height + 7) >> 3);
1014
1015 sb_cols = seq->use_128x128_superblock ? ((mi_cols + 31) >> 5)
1016 : ((mi_cols + 15) >> 4);
1017 sb_rows = seq->use_128x128_superblock ? ((mi_rows + 31) >> 5)
1018 : ((mi_rows + 15) >> 4);
1019
1020 sb_shift = seq->use_128x128_superblock ? 5 : 4;
1021 sb_size = sb_shift + 2;
1022
1023 max_tile_width_sb = AV1_MAX_TILE_WIDTH >> sb_size;
1024 max_tile_area_sb = AV1_MAX_TILE_AREA >> (2 * sb_size);
1025
1026 min_log2_tile_cols = mpp_av1_tile_log2(max_tile_width_sb, sb_cols);
1027 max_log2_tile_cols = mpp_av1_tile_log2(1, MPP_MIN(sb_cols, AV1_MAX_TILE_COLS));
1028 max_log2_tile_rows = mpp_av1_tile_log2(1, MPP_MIN(sb_rows, AV1_MAX_TILE_ROWS));
1029 min_log2_tiles = MPP_MAX(min_log2_tile_cols,
1030 mpp_av1_tile_log2(max_tile_area_sb, sb_rows * sb_cols));
1031
1032 flag(uniform_tile_spacing_flag);
1033
1034 if (current->uniform_tile_spacing_flag) {
1035 RK_S32 tile_width_sb, tile_height_sb;
1036
1037 increment(tile_cols_log2, min_log2_tile_cols, max_log2_tile_cols);
1038
1039 tile_width_sb = (sb_cols + (1 << current->tile_cols_log2) - 1) >>
1040 current->tile_cols_log2;
1041 current->tile_cols = (sb_cols + tile_width_sb - 1) / tile_width_sb;
1042
1043 min_log2_tile_rows = MPP_MAX(min_log2_tiles - current->tile_cols_log2, 0);
1044
1045 increment(tile_rows_log2, min_log2_tile_rows, max_log2_tile_rows);
1046
1047 tile_height_sb = (sb_rows + (1 << current->tile_rows_log2) - 1) >>
1048 current->tile_rows_log2;
1049 current->tile_rows = (sb_rows + tile_height_sb - 1) / tile_height_sb;
1050
1051 for (i = 0; i < current->tile_cols - 1; i++)
1052 infer(width_in_sbs_minus_1[i], tile_width_sb - 1);
1053 infer(width_in_sbs_minus_1[i],
1054 sb_cols - (current->tile_cols - 1) * tile_width_sb - 1);
1055 for (i = 0; i < current->tile_rows - 1; i++)
1056 infer(height_in_sbs_minus_1[i], tile_height_sb - 1);
1057 infer(height_in_sbs_minus_1[i],
1058 sb_rows - (current->tile_rows - 1) * tile_height_sb - 1);
1059
1060 } else {
1061 RK_S32 widest_tile_sb, start_sb, size_sb, max_width, max_height;
1062
1063 widest_tile_sb = 0;
1064
1065 start_sb = 0;
1066 for (i = 0; start_sb < sb_cols && i < AV1_MAX_TILE_COLS; i++) {
1067 max_width = MPP_MIN(sb_cols - start_sb, max_tile_width_sb);
1068 ns(max_width, width_in_sbs_minus_1[i]);
1069 //ns(max_width, width_in_sbs_minus_1[i]);
1070 size_sb = current->width_in_sbs_minus_1[i] + 1;
1071 widest_tile_sb = MPP_MAX(size_sb, widest_tile_sb);
1072 start_sb += size_sb;
1073 }
1074 current->tile_cols_log2 = mpp_av1_tile_log2(1, i);
1075 current->tile_cols = i;
1076
1077 if (min_log2_tiles > 0)
1078 max_tile_area_sb = (sb_rows * sb_cols) >> (min_log2_tiles + 1);
1079 else
1080 max_tile_area_sb = sb_rows * sb_cols;
1081 max_tile_height_sb = MPP_MAX(max_tile_area_sb / widest_tile_sb, 1);
1082
1083 start_sb = 0;
1084 for (i = 0; start_sb < sb_rows && i < AV1_MAX_TILE_ROWS; i++) {
1085 max_height = MPP_MIN(sb_rows - start_sb, max_tile_height_sb);
1086 ns(max_height, height_in_sbs_minus_1[i]);
1087 size_sb = current->height_in_sbs_minus_1[i] + 1;
1088 start_sb += size_sb;
1089 }
1090 current->tile_rows_log2 = mpp_av1_tile_log2(1, i);
1091 current->tile_rows = i;
1092 }
1093
1094 if (current->tile_cols_log2 > 0 ||
1095 current->tile_rows_log2 > 0) {
1096 fb(current->tile_cols_log2 + current->tile_rows_log2,
1097 context_update_tile_id);
1098 fb(2, tile_size_bytes_minus1);
1099 } else {
1100 infer(context_update_tile_id, 0);
1101 current->tile_size_bytes_minus1 = 3;
1102 }
1103
1104 ctx->tile_cols = current->tile_cols;
1105 ctx->tile_rows = current->tile_rows;
1106
1107 return 0;
1108 }
1109
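/*
 * quantization_params(): base_q_idx plus per-plane DC/AC deltas and the
 * optional quantizer matrix levels.
 */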
1110 static RK_S32 mpp_av1_quantization_params(AV1Context *ctx, BitReadCtx_t *gb,
1111 AV1RawFrameHeader *current)
1112 {
1113 const AV1RawSequenceHeader *seq = ctx->sequence_header;
1114 RK_S32 err;
1115
1116 fb(8, base_q_idx);
1117
1118 delta_q(delta_q_y_dc);
1119
1120 if (ctx->num_planes > 1) {
1121 if (seq->color_config.separate_uv_delta_q)
1122 flag(diff_uv_delta);
1123 else
1124 infer(diff_uv_delta, 0);
1125
1126 delta_q(delta_q_u_dc);
1127 delta_q(delta_q_u_ac);
1128
1129 if (current->diff_uv_delta) {
1130 delta_q(delta_q_v_dc);
1131 delta_q(delta_q_v_ac);
1132 } else {
1133 infer(delta_q_v_dc, current->delta_q_u_dc);
1134 infer(delta_q_v_ac, current->delta_q_u_ac);
1135 }
1136 } else {
1137 infer(delta_q_u_dc, 0);
1138 infer(delta_q_u_ac, 0);
1139 infer(delta_q_v_dc, 0);
1140 infer(delta_q_v_ac, 0);
1141 }
1142
1143 flag(using_qmatrix);
1144 if (current->using_qmatrix) {
1145 fb(4, qm_y);
1146 fb(4, qm_u);
1147 if (seq->color_config.separate_uv_delta_q)
1148 fb(4, qm_v);
1149 else
1150 infer(qm_v, current->qm_u);
1151 }
1152
1153 return 0;
1154 }
1155
1156 static RK_S32 mpp_av1_segmentation_params(AV1Context *ctx, BitReadCtx_t *gb,
1157 AV1RawFrameHeader *current)
1158 {
1159 static const RK_U8 bits[AV1_SEG_LVL_MAX] = { 8, 6, 6, 6, 6, 3, 0, 0 };
1160 static const RK_U8 sign[AV1_SEG_LVL_MAX] = { 1, 1, 1, 1, 1, 0, 0, 0 };
1161 static const RK_U8 default_feature_enabled[AV1_SEG_LVL_MAX] = { 0 };
1162 static const RK_S16 default_feature_value[AV1_SEG_LVL_MAX] = { 0 };
1163 RK_S32 i, j, err;
1164
1165 flag(segmentation_enabled);
1166
1167 if (current->segmentation_enabled) {
1168 if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) {
1169 infer(segmentation_update_map, 1);
1170 infer(segmentation_temporal_update, 0);
1171 infer(segmentation_update_data, 1);
1172 } else {
1173 flag(segmentation_update_map);
1174 if (current->segmentation_update_map)
1175 flag(segmentation_temporal_update);
1176 else
1177 infer(segmentation_temporal_update, 0);
1178 flag(segmentation_update_data);
1179 }
1180
1181 for (i = 0; i < AV1_MAX_SEGMENTS; i++) {
1182 const RK_U8 *ref_feature_enabled;
1183 const RK_S16 *ref_feature_value;
1184
1185 if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) {
1186 ref_feature_enabled = default_feature_enabled;
1187 ref_feature_value = default_feature_value;
1188 } else {
1189 ref_feature_enabled =
1190 ctx->ref_s[current->ref_frame_idx[current->primary_ref_frame]].feature_enabled[i];
1191 ref_feature_value =
1192 ctx->ref_s[current->ref_frame_idx[current->primary_ref_frame]].feature_value[i];
1193 }
1194
1195 for (j = 0; j < AV1_SEG_LVL_MAX; j++) {
1196 if (current->segmentation_update_data) {
1197 flags(feature_enabled[i][j], 2, i, j);
1198 if (current->feature_enabled[i][j] && bits[j] > 0) {
1199 if (sign[j]) {
1200 RK_S32 sign_, data;
1201
1202 READ_ONEBIT(gb, &sign_);
1203 READ_BITS(gb, bits[j], &data);
1204 if (sign_) data -= (1 << bits[j]);
1205 current->feature_value[i][j] = data;
1206 } else
1207 fbs(bits[j], feature_value[i][j], 2, i, j);
1208 } else {
1209 infer(feature_value[i][j], 0);
1210 }
1211 } else {
1212 infer(feature_enabled[i][j], ref_feature_enabled[j]);
1213 infer(feature_value[i][j], ref_feature_value[j]);
1214 }
1215 }
1216 }
1217 } else {
1218 for (i = 0; i < AV1_MAX_SEGMENTS; i++) {
1219 for (j = 0; j < AV1_SEG_LVL_MAX; j++) {
1220 infer(feature_enabled[i][j], 0);
1221 infer(feature_value[i][j], 0);
1222 }
1223 }
1224 }
1225
1226 return 0;
1227 __bitread_error:
1228 return MPP_ERR_STREAM;
1229 }
1230
1231 static RK_S32 mpp_av1_delta_q_params(AV1Context *ctx, BitReadCtx_t *gb,
1232 AV1RawFrameHeader *current)
1233 {
1234 RK_S32 err;
1235 (void)ctx;
1236 if (current->base_q_idx > 0)
1237 flag(delta_q_present);
1238 else
1239 infer(delta_q_present, 0);
1240
1241 if (current->delta_q_present)
1242 fb(2, delta_q_res);
1243
1244 return 0;
1245 }
1246
1247 static RK_S32 mpp_av1_delta_lf_params(AV1Context *ctx, BitReadCtx_t *gb,
1248 AV1RawFrameHeader *current)
1249 {
1250 RK_S32 err;
1251 (void)ctx;
1252 if (current->delta_q_present) {
1253 if (!current->allow_intrabc)
1254 flag(delta_lf_present);
1255 else
1256 infer(delta_lf_present, 0);
1257 if (current->delta_lf_present) {
1258 fb(2, delta_lf_res);
1259 flag(delta_lf_multi);
1260 } else {
1261 infer(delta_lf_res, 0);
1262 infer(delta_lf_multi, 0);
1263 }
1264 } else {
1265 infer(delta_lf_present, 0);
1266 infer(delta_lf_res, 0);
1267 infer(delta_lf_multi, 0);
1268 }
1269
1270 return 0;
1271 }
1272
1273 static RK_S32 mpp_av1_loop_filter_params(AV1Context *ctx, BitReadCtx_t *gb,
1274 AV1RawFrameHeader *current)
1275 {
1276 static const RK_S8 default_loop_filter_ref_deltas[AV1_TOTAL_REFS_PER_FRAME] =
1277 { 1, 0, 0, 0, -1, 0, -1, -1 };
1278 static const RK_S8 default_loop_filter_mode_deltas[2] = { 0, 0 };
1279 RK_S32 i, err;
1280
1281 if (ctx->coded_lossless || current->allow_intrabc) {
1282 infer(loop_filter_level[0], 0);
1283 infer(loop_filter_level[1], 0);
1284 infer(loop_filter_ref_deltas[AV1_REF_FRAME_INTRA], 1);
1285 infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST], 0);
1286 infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST2], 0);
1287 infer(loop_filter_ref_deltas[AV1_REF_FRAME_LAST3], 0);
1288 infer(loop_filter_ref_deltas[AV1_REF_FRAME_BWDREF], 0);
1289 infer(loop_filter_ref_deltas[AV1_REF_FRAME_GOLDEN], -1);
1290 infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF], -1);
1291 infer(loop_filter_ref_deltas[AV1_REF_FRAME_ALTREF2], -1);
1292 for (i = 0; i < 2; i++)
1293 infer(loop_filter_mode_deltas[i], 0);
1294 return 0;
1295 }
1296
1297 fb(6, loop_filter_level[0]);
1298 fb(6, loop_filter_level[1]);
1299
1300 if (ctx->num_planes > 1) {
1301 if (current->loop_filter_level[0] ||
1302 current->loop_filter_level[1]) {
1303 fb(6, loop_filter_level[2]);
1304 fb(6, loop_filter_level[3]);
1305 }
1306 }
1307
1308 av1d_dbg(AV1D_DBG_HEADER, "orderhint %d loop_filter_level %d %d %d %d\n",
1309 current->order_hint,
1310 current->loop_filter_level[0], current->loop_filter_level[1],
1311 current->loop_filter_level[2], current->loop_filter_level[3]);
1312 fb(3, loop_filter_sharpness);
1313
1314 flag(loop_filter_delta_enabled);
1315 if (current->loop_filter_delta_enabled) {
1316 const RK_S8 *ref_loop_filter_ref_deltas, *ref_loop_filter_mode_deltas;
1317
1318 if (current->primary_ref_frame == AV1_PRIMARY_REF_NONE) {
1319 ref_loop_filter_ref_deltas = default_loop_filter_ref_deltas;
1320 ref_loop_filter_mode_deltas = default_loop_filter_mode_deltas;
1321 } else {
1322 ref_loop_filter_ref_deltas =
1323 ctx->ref_s[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_ref_deltas;
1324 ref_loop_filter_mode_deltas =
1325 ctx->ref_s[current->ref_frame_idx[current->primary_ref_frame]].loop_filter_mode_deltas;
1326 }
1327
1328 flag(loop_filter_delta_update);
1329 for (i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++) {
1330 if (current->loop_filter_delta_update)
1331 flags(update_ref_delta[i], 1, i);
1332 else
1333 infer(update_ref_delta[i], 0);
1334 if (current->update_ref_delta[i])
1335 sus(1 + 6, loop_filter_ref_deltas[i], 1, i);
1336 else
1337 infer(loop_filter_ref_deltas[i], ref_loop_filter_ref_deltas[i]);
1338 }
1339 for (i = 0; i < 2; i++) {
1340 if (current->loop_filter_delta_update)
1341 flags(update_mode_delta[i], 1, i);
1342 else
1343 infer(update_mode_delta[i], 0);
1344 if (current->update_mode_delta[i])
1345 sus(1 + 6, loop_filter_mode_deltas[i], 1, i);
1346 else
1347 infer(loop_filter_mode_deltas[i], ref_loop_filter_mode_deltas[i]);
1348 }
1349 } else {
1350 for (i = 0; i < AV1_TOTAL_REFS_PER_FRAME; i++)
1351 infer(loop_filter_ref_deltas[i], default_loop_filter_ref_deltas[i]);
1352 for (i = 0; i < 2; i++)
1353 infer(loop_filter_mode_deltas[i], default_loop_filter_mode_deltas[i]);
1354 }
1355
1356 return 0;
1357 }
1358
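/*
 * cdef_params(): CDEF damping, bits and per-set primary/secondary
 * strengths; everything is inferred as zero when CDEF is not used.
 */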
1359 static RK_S32 mpp_av1_cdef_params(AV1Context *ctx, BitReadCtx_t *gb,
1360 AV1RawFrameHeader *current)
1361 {
1362 const AV1RawSequenceHeader *seq = ctx->sequence_header;
1363 RK_S32 i, err;
1364 if (ctx->coded_lossless || current->allow_intrabc ||
1365 !seq->enable_cdef) {
1366 infer(cdef_damping_minus_3, 0);
1367 infer(cdef_bits, 0);
1368 infer(cdef_y_pri_strength[0], 0);
1369 infer(cdef_y_sec_strength[0], 0);
1370 infer(cdef_uv_pri_strength[0], 0);
1371 infer(cdef_uv_sec_strength[0], 0);
1372
1373 return 0;
1374 }
1375
1376 fb(2, cdef_damping_minus_3);
1377 fb(2, cdef_bits);
1378
1379 for (i = 0; i < (1 << current->cdef_bits); i++) {
1380 fbs(4, cdef_y_pri_strength[i], 1, i);
1381 fbs(2, cdef_y_sec_strength[i], 1, i);
1382
1383 if (ctx->num_planes > 1) {
1384 fbs(4, cdef_uv_pri_strength[i], 1, i);
1385 fbs(2, cdef_uv_sec_strength[i], 1, i);
1386 }
1387 }
1388
1389 return 0;
1390 }
1391
1392 static RK_S32 mpp_av1_lr_params(AV1Context *ctx, BitReadCtx_t *gb,
1393 AV1RawFrameHeader *current)
1394 {
1395 const AV1RawSequenceHeader *seq = ctx->sequence_header;
1396 RK_S32 uses_lr, uses_chroma_lr;
1397 RK_S32 i, err;
1398
1399 if (ctx->all_lossless || current->allow_intrabc ||
1400 !seq->enable_restoration) {
1401 return 0;
1402 }
1403
1404 uses_lr = uses_chroma_lr = 0;
1405 for (i = 0; i < ctx->num_planes; i++) {
1406 fbs(2, lr_type[i], 1, i);
1407
1408 if (current->lr_type[i] != AV1_RESTORE_NONE) {
1409 uses_lr = 1;
1410 if (i > 0)
1411 uses_chroma_lr = 1;
1412 }
1413 }
1414
1415 if (uses_lr) {
1416 if (seq->use_128x128_superblock)
1417 increment(lr_unit_shift, 1, 2);
1418 else
1419 increment(lr_unit_shift, 0, 2);
1420
1421 if (seq->color_config.subsampling_x &&
1422 seq->color_config.subsampling_y && uses_chroma_lr) {
1423 fb(1, lr_uv_shift);
1424 } else {
1425 infer(lr_uv_shift, 0);
1426 }
1427 }
1428
1429 return 0;
1430 }
1431
1432 static RK_S32 mpp_av1_read_tx_mode(AV1Context *ctx, BitReadCtx_t *gb,
1433 AV1RawFrameHeader *current)
1434 {
1435 RK_S32 err;
1436
1437 if (ctx->coded_lossless)
1438 infer(tx_mode, 0);
1439 else {
1440 flag(tx_mode);
1441 current->tx_mode = current->tx_mode ? 4 : 3;
1442 }
1443
1444 return 0;
1445 }
1446
1447 static RK_S32 mpp_av1_frame_reference_mode(AV1Context *ctx, BitReadCtx_t *gb,
1448 AV1RawFrameHeader *current)
1449 {
1450 RK_S32 err;
1451 (void)ctx;
1452 if (current->frame_type == AV1_FRAME_INTRA_ONLY ||
1453 current->frame_type == AV1_FRAME_KEY)
1454 infer(reference_select, 0);
1455 else
1456 flag(reference_select);
1457
1458 return 0;
1459 }
1460
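/*
 * skip_mode_params(): pick the nearest forward/backward references by
 * order hint to decide whether skip_mode_present may be signalled; the
 * selected pair is kept in ctx->skip_ref0 / ctx->skip_ref1.
 */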
1461 static RK_S32 mpp_av1_skip_mode_params(AV1Context *ctx, BitReadCtx_t *gb,
1462 AV1RawFrameHeader *current)
1463 {
1464 const AV1RawSequenceHeader *seq = ctx->sequence_header;
1465 RK_S32 skip_mode_allowed;
1466 RK_S32 err;
1467
1468 if (current->frame_type == AV1_FRAME_KEY ||
1469 current->frame_type == AV1_FRAME_INTRA_ONLY ||
1470 !current->reference_select || !seq->enable_order_hint) {
1471 skip_mode_allowed = 0;
1472 } else {
1473 RK_S32 forward_idx, backward_idx;
1474 RK_S32 forward_hint, backward_hint;
1475 RK_S32 ref_hint, dist, i;
1476
1477 forward_idx = -1;
1478 backward_idx = -1;
1479 forward_hint = -1;
1480 backward_hint = -1;
1481 for (i = 0; i < AV1_REFS_PER_FRAME; i++) {
1482 ref_hint = ctx->ref_s[current->ref_frame_idx[i]].order_hint;
1483 dist = mpp_av1_get_relative_dist(seq, ref_hint,
1484 ctx->order_hint);
1485 if (dist < 0) {
1486 if (forward_idx < 0 ||
1487 mpp_av1_get_relative_dist(seq, ref_hint,
1488 forward_hint) > 0) {
1489 forward_idx = i;
1490 forward_hint = ref_hint;
1491 }
1492 } else if (dist > 0) {
1493 if (backward_idx < 0 ||
1494 mpp_av1_get_relative_dist(seq, ref_hint,
1495 backward_hint) < 0) {
1496 backward_idx = i;
1497 backward_hint = ref_hint;
1498 }
1499 }
1500 }
1501
1502 if (forward_idx < 0) {
1503 skip_mode_allowed = 0;
1504 } else if (backward_idx >= 0) {
1505 skip_mode_allowed = 1;
1506 ctx->skip_ref0 = MPP_MIN(forward_idx, backward_idx) + 1;
1507 ctx->skip_ref1 = MPP_MAX(forward_idx, backward_idx) + 1;
1508 // Frames for skip mode are forward_idx and backward_idx.
1509 } else {
1510 RK_S32 second_forward_idx;
1511 RK_S32 second_forward_hint;
1512 second_forward_idx = -1;
1513 for (i = 0; i < AV1_REFS_PER_FRAME; i++) {
1514 ref_hint = ctx->ref_s[current->ref_frame_idx[i]].order_hint;
1515 if (mpp_av1_get_relative_dist(seq, ref_hint,
1516 forward_hint) < 0) {
1517 if (second_forward_idx < 0 ||
1518 mpp_av1_get_relative_dist(seq, ref_hint,
1519 second_forward_hint) > 0) {
1520 second_forward_idx = i;
1521 second_forward_hint = ref_hint;
1522 }
1523 }
1524 }
1525
1526 if (second_forward_idx < 0) {
1527 skip_mode_allowed = 0;
1528 } else {
1529 ctx->skip_ref0 = MPP_MIN(forward_idx, second_forward_idx) + 1;
1530 ctx->skip_ref1 = MPP_MAX(forward_idx, second_forward_idx) + 1;
1531 skip_mode_allowed = 1;
1532 // Frames for skip mode are forward_idx and second_forward_idx.
1533 }
1534 }
1535 }
1536
1537 if (skip_mode_allowed)
1538 flag(skip_mode_present);
1539 else
1540 infer(skip_mode_present, 0);
1541
1542 return 0;
1543 }
1544
1545 static RK_S32 mpp_av1_global_motion_param(AV1Context *ctx, BitReadCtx_t *gb,
1546 AV1RawFrameHeader *current,
1547 RK_S32 type, RK_S32 ref, RK_S32 idx)
1548 {
1549 RK_U32 abs_bits, prec_bits, num_syms;
1550 RK_S32 err;
1551 (void)ctx;
1552 if (idx < 2) {
1553 if (type == AV1_WARP_MODEL_TRANSLATION) {
1554 abs_bits = AV1_GM_ABS_TRANS_ONLY_BITS - !current->allow_high_precision_mv;
1555 prec_bits = AV1_GM_TRANS_ONLY_PREC_BITS - !current->allow_high_precision_mv;
1556 } else {
1557 abs_bits = AV1_GM_ABS_TRANS_BITS;
1558 prec_bits = AV1_GM_TRANS_PREC_BITS;
1559 }
1560 } else {
1561 abs_bits = AV1_GM_ABS_ALPHA_BITS;
1562 prec_bits = AV1_GM_ALPHA_PREC_BITS;
1563 }
1564
1565 num_syms = 2 * (1 << abs_bits) + 1;
1566 subexp(gm_params[ref][idx], num_syms);// 2, ref, idx);
1567
1568 // Actual gm_params value is not reconstructed here.
1569 (void)prec_bits;
1570
1571 return 0;
1572 }
1573
1574 /*
1575 * Actual gm_params value is not reconstructed here.
1576 * Real gm_params update in av1d_parser.c->global_motion_params()
1577 */
1578 static RK_S32 mpp_av1_global_motion_params(AV1Context *ctx, BitReadCtx_t *gb,
1579 AV1RawFrameHeader *current)
1580 {
1581 RK_S32 ref, type;
1582 RK_S32 err;
1583
1584 if (current->frame_type == AV1_FRAME_KEY ||
1585 current->frame_type == AV1_FRAME_INTRA_ONLY)
1586 return 0;
1587
1588 for (ref = AV1_REF_FRAME_LAST; ref <= AV1_REF_FRAME_ALTREF; ref++) {
1589 flags(is_global[ref], 1, ref);
1590 if (current->is_global[ref]) {
1591 flags(is_rot_zoom[ref], 1, ref);
1592 if (current->is_rot_zoom[ref]) {
1593 type = AV1_WARP_MODEL_ROTZOOM;
1594 } else {
1595 flags(is_translation[ref], 1, ref);
1596 type = current->is_translation[ref] ? AV1_WARP_MODEL_TRANSLATION
1597 : AV1_WARP_MODEL_AFFINE;
1598 }
1599 } else {
1600 type = AV1_WARP_MODEL_IDENTITY;
1601 }
1602
1603 if (type >= AV1_WARP_MODEL_ROTZOOM) {
1604 CHECK(mpp_av1_global_motion_param(ctx, gb, current, type, ref, 2));
1605 CHECK(mpp_av1_global_motion_param(ctx, gb, current, type, ref, 3));
1606 if (type == AV1_WARP_MODEL_AFFINE) {
1607 CHECK(mpp_av1_global_motion_param(ctx, gb, current, type, ref, 4));
1608 CHECK(mpp_av1_global_motion_param(ctx, gb, current, type, ref, 5));
1609 } else {
1610 current->gm_params[ref][4] = -current->gm_params[ref][3];
1611 current->gm_params[ref][5] = current->gm_params[ref][2];
1612 }
1613 }
1614 if (type >= AV1_WARP_MODEL_TRANSLATION) {
1615 CHECK(mpp_av1_global_motion_param(ctx, gb, current, type, ref, 0));
1616 CHECK(mpp_av1_global_motion_param(ctx, gb, current, type, ref, 1));
1617 }
1618 }
1619
1620 return 0;
1621 }
1622
1623 static RK_S32 mpp_av1_film_grain_params(AV1Context *ctx, BitReadCtx_t *gb,
1624 AV1RawFilmGrainParams *current,
1625 AV1RawFrameHeader *frame_header)
1626 {
1627 const AV1RawSequenceHeader *seq = ctx->sequence_header;
1628 RK_S32 num_pos_luma, num_pos_chroma;
1629 RK_S32 i, err;
1630
1631 if (!seq->film_grain_params_present ||
1632 (!frame_header->show_frame && !frame_header->showable_frame))
1633 return 0;
1634
1635 flag(apply_grain);
1636
1637 if (!current->apply_grain)
1638 return 0;
1639
1640 fb(16, grain_seed);
1641
1642 if (frame_header->frame_type == AV1_FRAME_INTER)
1643 flag(update_grain);
1644 else
1645 infer(update_grain, 1);
1646
1647 if (!current->update_grain) {
1648 fb(3, film_grain_params_ref_idx);
1649 return 0;
1650 }
1651
1652 fc(4, num_y_points, 0, 14);
1653 for (i = 0; i < current->num_y_points; i++) {
1654 fcs(8, point_y_value[i],
1655 i ? current->point_y_value[i - 1] + 1 : 0,
1656 MAX_UINT_BITS(8) - (current->num_y_points - i - 1),
1657 1, i);
1658 fbs(8, point_y_scaling[i], 1, i);
1659 }
1660
1661 if (seq->color_config.mono_chrome)
1662 infer(chroma_scaling_from_luma, 0);
1663 else
1664 flag(chroma_scaling_from_luma);
1665
1666 if (seq->color_config.mono_chrome ||
1667 current->chroma_scaling_from_luma ||
1668 (seq->color_config.subsampling_x == 1 &&
1669 seq->color_config.subsampling_y == 1 &&
1670 current->num_y_points == 0)) {
1671 infer(num_cb_points, 0);
1672 infer(num_cr_points, 0);
1673 } else {
1674 fc(4, num_cb_points, 0, 10);
1675 for (i = 0; i < current->num_cb_points; i++) {
1676 fcs(8, point_cb_value[i],
1677 i ? current->point_cb_value[i - 1] + 1 : 0,
1678 MAX_UINT_BITS(8) - (current->num_cb_points - i - 1),
1679 1, i);
1680 fbs(8, point_cb_scaling[i], 1, i);
1681 }
1682 fc(4, num_cr_points, 0, 10);
1683 for (i = 0; i < current->num_cr_points; i++) {
1684 fcs(8, point_cr_value[i],
1685 i ? current->point_cr_value[i - 1] + 1 : 0,
1686 MAX_UINT_BITS(8) - (current->num_cr_points - i - 1),
1687 1, i);
1688 fbs(8, point_cr_scaling[i], 1, i);
1689 }
1690 }
1691
1692 fb(2, grain_scaling_minus_8);
1693 fb(2, ar_coeff_lag);
1694 num_pos_luma = 2 * current->ar_coeff_lag * (current->ar_coeff_lag + 1);
1695 if (current->num_y_points) {
1696 num_pos_chroma = num_pos_luma + 1;
1697 for (i = 0; i < num_pos_luma; i++)
1698 fbs(8, ar_coeffs_y_plus_128[i], 1, i);
1699 } else {
1700 num_pos_chroma = num_pos_luma;
1701 }
1702 if (current->chroma_scaling_from_luma || current->num_cb_points) {
1703 for (i = 0; i < num_pos_chroma; i++)
1704 fbs(8, ar_coeffs_cb_plus_128[i], 1, i);
1705 }
1706 if (current->chroma_scaling_from_luma || current->num_cr_points) {
1707 for (i = 0; i < num_pos_chroma; i++)
1708 fbs(8, ar_coeffs_cr_plus_128[i], 1, i);
1709 }
1710 fb(2, ar_coeff_shift_minus_6);
1711 fb(2, grain_scale_shift);
1712 if (current->num_cb_points) {
1713 fb(8, cb_mult);
1714 fb(8, cb_luma_mult);
1715 fb(9, cb_offset);
1716 }
1717 if (current->num_cr_points) {
1718 fb(8, cr_mult);
1719 fb(8, cr_luma_mult);
1720 fb(9, cr_offset);
1721 }
1722
1723 flag(overlap_flag);
1724 flag(clip_to_restricted_range);
1725
1726 return 0;
1727 }
1728
1729 static RK_S32 mpp_av1_uncompressed_header(AV1Context *ctx, BitReadCtx_t *gb,
1730 AV1RawFrameHeader *current)
1731 {
1732 const AV1RawSequenceHeader *seq;
1733 RK_S32 id_len, diff_len, all_frames, frame_is_intra, order_hint_bits;
1734 RK_S32 i, err;
1735
1736 if (!ctx->sequence_header) {
1737 mpp_err_f("No sequence header available: "
1738 "unable to decode frame header.\n");
1739 return MPP_ERR_UNKNOW;
1740 }
1741 seq = ctx->sequence_header;
1742
1743 id_len = seq->additional_frame_id_length_minus_1 +
1744 seq->delta_frame_id_length_minus_2 + 3;
1745 all_frames = (1 << AV1_NUM_REF_FRAMES) - 1;
1746
1747 if (seq->reduced_still_picture_header) {
1748 infer(show_existing_frame, 0);
1749 infer(frame_type, AV1_FRAME_KEY);
1750 infer(show_frame, 1);
1751 infer(showable_frame, 0);
1752 frame_is_intra = 1;
1753
1754 } else {
1755 flag(show_existing_frame);
1756
1757 if (current->show_existing_frame) {
1758 AV1ReferenceFrameState *ref;
1759
1760 fb(3, frame_to_show_map_idx);
1761 ref = &ctx->ref_s[current->frame_to_show_map_idx];
1762
1763 if (!ref->valid) {
1764 mpp_err_f("Missing reference frame needed for "
1765 "show_existing_frame (frame_to_show_map_idx = %d).\n",
1766 current->frame_to_show_map_idx);
1767 return MPP_ERR_UNKNOW;
1768 }
1769
1770 if (seq->decoder_model_info_present_flag &&
1771 !seq->timing_info.equal_picture_interval) {
1772 fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1,
1773 frame_presentation_time);
1774 }
1775
1776 if (seq->frame_id_numbers_present_flag)
1777 fb(id_len, display_frame_id);
1778
1779 infer(frame_type, ref->frame_type);
1780 if (current->frame_type == AV1_FRAME_KEY) {
1781 infer(refresh_frame_flags, all_frames);
1782
1783 // Section 7.21
1784 infer(current_frame_id, ref->frame_id);
1785 ctx->upscaled_width = ref->upscaled_width;
1786 ctx->frame_width = ref->frame_width;
1787 ctx->frame_height = ref->frame_height;
1788 ctx->render_width = ref->render_width;
1789 ctx->render_height = ref->render_height;
1790 ctx->bit_depth = ref->bit_depth;
1791 ctx->order_hint = ref->order_hint;
1792 } else
1793 infer(refresh_frame_flags, 0);
1794
1795 infer(frame_width_minus_1, ref->upscaled_width - 1);
1796 infer(frame_height_minus_1, ref->frame_height - 1);
1797 infer(render_width_minus_1, ref->render_width - 1);
1798 infer(render_height_minus_1, ref->render_height - 1);
1799
1800 // Section 7.20
1801 goto update_refs;
1802 }
1803
1804 fb(2, frame_type);
1805 frame_is_intra = (current->frame_type == AV1_FRAME_INTRA_ONLY ||
1806 current->frame_type == AV1_FRAME_KEY);
1807
1808 ctx->frame_is_intra = frame_is_intra;
1809 if (current->frame_type == AV1_FRAME_KEY) {
1810 RK_U32 refresh_frame_flags = (1 << NUM_REF_FRAMES) - 1;
1811
1812 Av1GetCDFs(ctx, current->frame_to_show_map_idx);
1813 Av1StoreCDFs(ctx, refresh_frame_flags);
1814 }
1815
1816 flag(show_frame);
1817 if (current->show_frame &&
1818 seq->decoder_model_info_present_flag &&
1819 !seq->timing_info.equal_picture_interval) {
1820 fb(seq->decoder_model_info.frame_presentation_time_length_minus_1 + 1,
1821 frame_presentation_time);
1822 }
1823 if (current->show_frame)
1824 infer(showable_frame, current->frame_type != AV1_FRAME_KEY);
1825 else
1826 flag(showable_frame);
1827
1828 if (current->frame_type == AV1_FRAME_SWITCH ||
1829 (current->frame_type == AV1_FRAME_KEY && current->show_frame))
1830 infer(error_resilient_mode, 1);
1831 else
1832 flag(error_resilient_mode);
1833 }
1834
1835 if (current->frame_type == AV1_FRAME_KEY && current->show_frame) {
1836 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
1837 ctx->ref_s[i].valid = 0;
1838 ctx->ref_s[i].order_hint = 0;
1839 }
1840 }
1841
1842 flag(disable_cdf_update);
1843
1844 if (seq->seq_force_screen_content_tools ==
1845 AV1_SELECT_SCREEN_CONTENT_TOOLS) {
1846 flag(allow_screen_content_tools);
1847 } else {
1848 infer(allow_screen_content_tools,
1849 seq->seq_force_screen_content_tools);
1850 }
1851 if (current->allow_screen_content_tools) {
1852 if (seq->seq_force_integer_mv == AV1_SELECT_INTEGER_MV)
1853 flag(force_integer_mv);
1854 else
1855 infer(force_integer_mv, seq->seq_force_integer_mv);
1856 } else {
1857 infer(force_integer_mv, 0);
1858 }
1859
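/* Reference frame marking: after the new current_frame_id is read, any saved
 * reference whose frame_id falls outside the (1 << diff_len) window behind it
 * (taking wrap-around over id_len bits into account) can no longer be
 * referenced and is marked invalid. */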
1860 if (seq->frame_id_numbers_present_flag) {
1861 fb(id_len, current_frame_id);
1862
1863 diff_len = seq->delta_frame_id_length_minus_2 + 2;
1864 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
1865 if (current->current_frame_id > (RK_S32)(1 << diff_len)) {
1866 if (ctx->ref_s[i].frame_id > current->current_frame_id ||
1867 ctx->ref_s[i].frame_id < (current->current_frame_id -
1868 (RK_S32)(1 << diff_len)))
1869 ctx->ref_s[i].valid = 0;
1870 } else {
1871 if (ctx->ref_s[i].frame_id > current->current_frame_id &&
1872 ctx->ref_s[i].frame_id < ((RK_S32)(1 << id_len) +
1873 current->current_frame_id -
1874 (RK_S32)(1 << diff_len)))
1875 ctx->ref_s[i].valid = 0;
1876 }
1877 }
1878 } else {
1879 infer(current_frame_id, 0);
1880 }
1881
1882 if (current->frame_type == AV1_FRAME_SWITCH)
1883 infer(frame_size_override_flag, 1);
1884 else if (seq->reduced_still_picture_header)
1885 infer(frame_size_override_flag, 0);
1886 else
1887 flag(frame_size_override_flag);
1888
1889 order_hint_bits =
1890 seq->enable_order_hint ? seq->order_hint_bits_minus_1 + 1 : 0;
1891 if (order_hint_bits > 0)
1892 fb(order_hint_bits, order_hint);
1893 else
1894 infer(order_hint, 0);
1895 ctx->order_hint = current->order_hint;
1896
1897 if (frame_is_intra || current->error_resilient_mode)
1898 infer(primary_ref_frame, AV1_PRIMARY_REF_NONE);
1899 else
1900 fb(3, primary_ref_frame);
1901
1902 if (seq->decoder_model_info_present_flag) {
1903 flag(buffer_removal_time_present_flag);
1904 if (current->buffer_removal_time_present_flag) {
1905 for (i = 0; i <= seq->operating_points_cnt_minus_1; i++) {
1906 if (seq->decoder_model_present_for_this_op[i]) {
1907 RK_S32 op_pt_idc = seq->operating_point_idc[i];
1908 RK_S32 in_temporal_layer = (op_pt_idc >> ctx->temporal_id ) & 1;
1909 RK_S32 in_spatial_layer = (op_pt_idc >> (ctx->spatial_id + 8)) & 1;
1910 if (seq->operating_point_idc[i] == 0 ||
1911 (in_temporal_layer && in_spatial_layer)) {
1912 fbs(seq->decoder_model_info.buffer_removal_time_length_minus_1 + 1,
1913 buffer_removal_time[i], 1, i);
1914 }
1915 }
1916 }
1917 }
1918 }
1919
1920 if (current->frame_type == AV1_FRAME_SWITCH ||
1921 (current->frame_type == AV1_FRAME_KEY && current->show_frame))
1922 infer(refresh_frame_flags, all_frames);
1923 else
1924 fb(8, refresh_frame_flags);
1925
1926 ctx->refresh_frame_flags = current->refresh_frame_flags;
1927 if (!frame_is_intra || current->refresh_frame_flags != all_frames) {
1928 if (seq->enable_order_hint) {
1929 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
1930 if (current->error_resilient_mode)
1931 fbs(order_hint_bits, ref_order_hint[i], 1, i);
1932 else
1933 infer(ref_order_hint[i], ctx->ref_s[i].order_hint);
1934 if (current->ref_order_hint[i] != ctx->ref_s[i].order_hint)
1935 ctx->ref_s[i].valid = 0;
1936 }
1937 }
1938 }
1939
1940 if (current->frame_type == AV1_FRAME_KEY ||
1941 current->frame_type == AV1_FRAME_INTRA_ONLY) {
1942 CHECK(mpp_av1_frame_size(ctx, gb, current));
1943 CHECK(mpp_av1_render_size(ctx, gb, current));
1944
1945 if (current->allow_screen_content_tools &&
1946 ctx->upscaled_width == ctx->frame_width)
1947 flag(allow_intrabc);
1948 else
1949 infer(allow_intrabc, 0);
1950
1951 } else {
1952 if (!seq->enable_order_hint) {
1953 infer(frame_refs_short_signaling, 0);
1954 } else {
1955 flag(frame_refs_short_signaling);
1956 if (current->frame_refs_short_signaling) {
1957 fb(3, last_frame_idx);
1958 fb(3, golden_frame_idx);
1959 CHECK(mpp_av1_set_frame_refs(ctx, gb, current));
1960 }
1961 }
1962
1963 for (i = 0; i < AV1_REFS_PER_FRAME; i++) {
1964 if (!current->frame_refs_short_signaling)
1965 fbs(3, ref_frame_idx[i], 1, i);
1966 if (seq->frame_id_numbers_present_flag) {
1967 fbs(seq->delta_frame_id_length_minus_2 + 2,
1968 delta_frame_id_minus1[i], 1, i);
1969 }
1970 }
1971
1972 if (current->frame_size_override_flag &&
1973 !current->error_resilient_mode) {
1974 CHECK(mpp_av1_frame_size_with_refs(ctx, gb, current));
1975 } else {
1976 CHECK(mpp_av1_frame_size(ctx, gb, current));
1977 CHECK(mpp_av1_render_size(ctx, gb, current));
1978 }
1979
1980 if (current->force_integer_mv)
1981 infer(allow_high_precision_mv, 0);
1982 else
1983 flag(allow_high_precision_mv);
1984
1985 CHECK(mpp_av1_interpolation_filter(ctx, gb, current));
1986
1987 flag(is_motion_mode_switchable);
1988
1989 if (current->error_resilient_mode ||
1990 !seq->enable_ref_frame_mvs)
1991 infer(use_ref_frame_mvs, 0);
1992 else
1993 flag(use_ref_frame_mvs);
1994
1995 infer(allow_intrabc, 0);
1996 }
1997
1998 if (!frame_is_intra) {
1999 // Derive reference frame sign biases.
2000 }
2001
2002 if (seq->reduced_still_picture_header || current->disable_cdf_update)
2003 infer(disable_frame_end_update_cdf, 1);
2004 else
2005 flag(disable_frame_end_update_cdf);
2006
2007 ctx->disable_frame_end_update_cdf = current->disable_frame_end_update_cdf;
2008
2009 if (current->use_ref_frame_mvs) {
2010 // Perform motion field estimation process.
2011 }
2012 av1d_dbg(AV1D_DBG_HEADER, "tile_info in %d", mpp_get_bits_count(gb));
2013 CHECK(mpp_av1_tile_info(ctx, gb, current));
2014 av1d_dbg(AV1D_DBG_HEADER, "ptile_info out %d", mpp_get_bits_count(gb));
2015
2016 CHECK(mpp_av1_quantization_params(ctx, gb, current));
2017 av1d_dbg(AV1D_DBG_HEADER, "quantization out %d", mpp_get_bits_count(gb));
2018
2019 CHECK(mpp_av1_segmentation_params(ctx, gb, current));
2020 av1d_dbg(AV1D_DBG_HEADER, "segmentation out %d", mpp_get_bits_count(gb));
2021
2022 CHECK(mpp_av1_delta_q_params(ctx, gb, current));
2023 av1d_dbg(AV1D_DBG_HEADER, "delta_q out %d", mpp_get_bits_count(gb));
2024
2025 CHECK(mpp_av1_delta_lf_params(ctx, gb, current));
2026 av1d_dbg(AV1D_DBG_HEADER, "lf out %d", mpp_get_bits_count(gb));
2027
2028 // Init coeff CDFs / load previous segments.
2029 if (current->error_resilient_mode || frame_is_intra || current->primary_ref_frame == AV1_PRIMARY_REF_NONE) {
2030 // Init non-coeff CDFs.
2031 // Setup past independence.
2032 ctx->cdfs = &ctx->default_cdfs;
2033 ctx->cdfs_ndvc = &ctx->default_cdfs_ndvc;
2034 Av1DefaultCoeffProbs(current->base_q_idx, ctx->cdfs);
2035 } else {
2036 // Load CDF tables from previous frame.
2037 // Load params from previous frame.
2038 RK_U32 idx = current->ref_frame_idx[current->primary_ref_frame];
2039
2040 Av1GetCDFs(ctx, idx);
2041 }
2042 av1d_dbg(AV1D_DBG_HEADER, "show_existing_frame_index %d primary_ref_frame %d %d (%d) refresh_frame_flags %d base_q_idx %d\n",
2043 current->frame_to_show_map_idx,
2044 current->ref_frame_idx[current->primary_ref_frame],
2045 ctx->ref[current->ref_frame_idx[current->primary_ref_frame]].slot_index,
2046 current->primary_ref_frame,
2047 current->refresh_frame_flags,
2048 current->base_q_idx);
2049 Av1StoreCDFs(ctx, current->refresh_frame_flags);
2050
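/* coded_lossless holds only if every segment's effective qindex is zero and
 * all per-plane DC/AC delta-q values are zero; all_lossless additionally
 * requires that no superres scaling is in use
 * (frame_width == upscaled_width). */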
2051 ctx->coded_lossless = 1;
2052 for (i = 0; i < AV1_MAX_SEGMENTS; i++) {
2053 RK_S32 qindex;
2054 if (current->feature_enabled[i][AV1_SEG_LVL_ALT_Q]) {
2055 qindex = (current->base_q_idx +
2056 current->feature_value[i][AV1_SEG_LVL_ALT_Q]);
2057 } else {
2058 qindex = current->base_q_idx;
2059 }
2060 qindex = mpp_clip_uintp2(qindex, 8);
2061
2062 if (qindex || current->delta_q_y_dc ||
2063 current->delta_q_u_ac || current->delta_q_u_dc ||
2064 current->delta_q_v_ac || current->delta_q_v_dc) {
2065 ctx->coded_lossless = 0;
2066 }
2067 }
2068 ctx->all_lossless = ctx->coded_lossless &&
2069 ctx->frame_width == ctx->upscaled_width;
2070 av1d_dbg(AV1D_DBG_HEADER, "filter in %d", mpp_get_bits_count(gb));
2071
2072 CHECK(mpp_av1_loop_filter_params(ctx, gb, current));
2073 av1d_dbg(AV1D_DBG_HEADER, "cdef in %d", mpp_get_bits_count(gb));
2074
2075 CHECK(mpp_av1_cdef_params(ctx, gb, current));
2076 av1d_dbg(AV1D_DBG_HEADER, "lr in %d", mpp_get_bits_count(gb));
2077
2078 CHECK(mpp_av1_lr_params(ctx, gb, current));
2079 av1d_dbg(AV1D_DBG_HEADER, "read_tx in %d", mpp_get_bits_count(gb));
2080
2081 CHECK(mpp_av1_read_tx_mode(ctx, gb, current));
2082 av1d_dbg(AV1D_DBG_HEADER, "reference in %d", mpp_get_bits_count(gb));
2083
2084 CHECK(mpp_av1_frame_reference_mode(ctx, gb, current));
2085 av1d_dbg(AV1D_DBG_HEADER, "skip_mode in %d", mpp_get_bits_count(gb));
2086
2087 CHECK(mpp_av1_skip_mode_params(ctx, gb, current));
2088
2089 if (frame_is_intra || current->error_resilient_mode ||
2090 !seq->enable_warped_motion)
2091 infer(allow_warped_motion, 0);
2092 else
2093 flag(allow_warped_motion);
2094
2095 flag(reduced_tx_set);
2096 av1d_dbg(AV1D_DBG_HEADER, "motion in %d", mpp_get_bits_count(gb));
2097
2098 CHECK(mpp_av1_global_motion_params(ctx, gb, current));
2099 av1d_dbg(AV1D_DBG_HEADER, "grain in %d", mpp_get_bits_count(gb));
2100 CHECK(mpp_av1_film_grain_params(ctx, gb, &current->film_grain, current));
2101 av1d_dbg(AV1D_DBG_HEADER, "film_grain out %d", mpp_get_bits_count(gb));
2102
2103 av1d_dbg(AV1D_DBG_REF, "Frame %d: size %dx%d "
2104 "upscaled %d render %dx%d subsample %dx%d "
2105 "bitdepth %d tiles %dx%d.\n", ctx->order_hint,
2106 ctx->frame_width, ctx->frame_height, ctx->upscaled_width,
2107 ctx->render_width, ctx->render_height,
2108 seq->color_config.subsampling_x + 1,
2109 seq->color_config.subsampling_y + 1, ctx->bit_depth,
2110 ctx->tile_rows, ctx->tile_cols);
2111
2112 update_refs:
2113 for (i = 0; i < AV1_NUM_REF_FRAMES; i++) {
2114 if (current->refresh_frame_flags & (1 << i)) {
2115 ctx->ref_s[i] = (AV1ReferenceFrameState) {
2116 .valid = 1,
2117 .frame_id = current->current_frame_id,
2118 .upscaled_width = ctx->upscaled_width,
2119 .frame_width = ctx->frame_width,
2120 .frame_height = ctx->frame_height,
2121 .render_width = ctx->render_width,
2122 .render_height = ctx->render_height,
2123 .frame_type = current->frame_type,
2124 .subsampling_x = seq->color_config.subsampling_x,
2125 .subsampling_y = seq->color_config.subsampling_y,
2126 .bit_depth = ctx->bit_depth,
2127 .order_hint = ctx->order_hint,
2128 };
2129 memcpy(ctx->ref_s[i].loop_filter_ref_deltas, current->loop_filter_ref_deltas,
2130 sizeof(current->loop_filter_ref_deltas));
2131 memcpy(ctx->ref_s[i].loop_filter_mode_deltas, current->loop_filter_mode_deltas,
2132 sizeof(current->loop_filter_mode_deltas));
2133 memcpy(ctx->ref_s[i].feature_enabled, current->feature_enabled,
2134 sizeof(current->feature_enabled));
2135 memcpy(ctx->ref_s[i].feature_value, current->feature_value,
2136 sizeof(current->feature_value));
2137 }
2138 }
2139
2140 return 0;
2141 }
2142
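/*
 * Frame header OBU. Parses the uncompressed header and keeps a byte copy of
 * it so a later redundant frame header OBU can be compared field by field
 * against the original. A show_existing_frame header clears seen_frame_header;
 * a regular header sets it until the last tile group of the frame is read.
 */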
2143 static RK_S32 mpp_av1_frame_header_obu(AV1Context *ctx, BitReadCtx_t *gb,
2144 AV1RawFrameHeader *current, RK_S32 redundant,
2145 void *rw_buffer_ref)
2146 {
2147 RK_S32 start_pos, fh_bits, fh_bytes, err;
2148 RK_U8 *fh_start;
2149 (void)rw_buffer_ref;
2150 if (ctx->seen_frame_header) {
2151 if (!redundant) {
2152 mpp_err_f("Invalid repeated "
2153 "frame header OBU.\n");
2154 return MPP_ERR_UNKNOW;
2155 } else {
2156 BitReadCtx_t fh;
2157 size_t i, b;
2158 RK_U32 val;
2159
2160 // mpp_assert(ctx->frame_header_ref && ctx->frame_header);
2161
2162 mpp_set_bitread_ctx(&fh, ctx->frame_header,
2163 ctx->frame_header_size);
2164
2165 for (i = 0; i < ctx->frame_header_size; i += 8) {
2166 b = MPP_MIN(ctx->frame_header_size - i, 8);
2167 mpp_read_bits(&fh, b, (RK_S32*)&val);
2168 xf(b, frame_header_copy[i],
2169 val, val, val, 1, i / 8);
2170 }
2171 }
2172 } else {
2173
2174 start_pos = mpp_get_bits_count(gb);
2175
2176 CHECK(mpp_av1_uncompressed_header(ctx, gb, current));
2177
2178 ctx->tile_num = 0;
2179
2180 if (current->show_existing_frame) {
2181 ctx->seen_frame_header = 0;
2182 } else {
2183 ctx->seen_frame_header = 1;
2184
2185 fh_bits = mpp_get_bits_count(gb) - start_pos;
2186 fh_start = (RK_U8*)gb->buf + start_pos / 8;
2187
2188 fh_bytes = (fh_bits + 7) / 8;
2189 ctx->frame_header_size = fh_bits;
2190 MPP_FREE(ctx->frame_header);
2191 ctx->frame_header =
2192 mpp_malloc(RK_U8, fh_bytes + BUFFER_PADDING_SIZE);
2193 if (!ctx->frame_header) {
2194 mpp_err_f("frame header malloc failed\n");
2195 return MPP_ERR_NOMEM;
2196 }
2197 memcpy(ctx->frame_header, fh_start, fh_bytes);
2198 }
2199 }
2200
2201 return 0;
2202 }
2203
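/*
 * Tile group OBU header. Reads tg_start/tg_end (or infers them when a single
 * tile group carries all tiles) and re-arms frame header parsing once the
 * last tile of the frame has been covered. The tile payload itself is not
 * consumed here; it is referenced later via mpp_av1_ref_tile_data().
 */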
2204 static RK_S32 mpp_av1_tile_group_obu(AV1Context *ctx, BitReadCtx_t *gb,
2205 AV1RawTileGroup *current)
2206 {
2207 RK_S32 num_tiles, tile_bits;
2208 RK_S32 err;
2209
2210 num_tiles = ctx->tile_cols * ctx->tile_rows;
2211 if (num_tiles > 1)
2212 flag(tile_start_and_end_present_flag);
2213 else
2214 infer(tile_start_and_end_present_flag, 0);
2215
2216 if (num_tiles == 1 || !current->tile_start_and_end_present_flag) {
2217 infer(tg_start, 0);
2218 infer(tg_end, num_tiles - 1);
2219 } else {
2220 tile_bits = mpp_av1_tile_log2(1, ctx->tile_cols) +
2221 mpp_av1_tile_log2(1, ctx->tile_rows);
2222 fc(tile_bits, tg_start, ctx->tile_num, num_tiles - 1);
2223 fc(tile_bits, tg_end, current->tg_start, num_tiles - 1);
2224 }
2225
2226 ctx->tile_num = current->tg_end + 1;
2227
2228 CHECK(mpp_av1_byte_alignment(ctx, gb));
2229
2230 // Reset header for next frame.
2231 if (current->tg_end == num_tiles - 1)
2232 ctx->seen_frame_header = 0;
2233 // Tile data follows.
2234
2235 return 0;
2236 }
2237
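/*
 * Frame OBU: a frame header immediately followed by byte alignment and a
 * tile group covering all tiles of the frame.
 */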
2238 static RK_S32 mpp_av1_frame_obu(AV1Context *ctx, BitReadCtx_t *gb,
2239 AV1RawFrame *current,
2240 void *rw_buffer_ref)
2241 {
2242 RK_S32 err;
2243 RK_U32 start_pos = mpp_get_bits_count(gb);
2244
2245 CHECK(mpp_av1_frame_header_obu(ctx, gb, &current->header,
2246 0, rw_buffer_ref));
2247
2248 CHECK(mpp_av1_byte_alignment(ctx, gb));
2249
2250 CHECK(mpp_av1_tile_group_obu(ctx, gb, &current->tile_group));
2251 ctx->frame_tag_size += (mpp_get_bits_count(gb) - start_pos + 7) >> 3;
2252
2253 return 0;
2254 }
2255
2256 static RK_S32 mpp_av1_tile_list_obu(AV1Context *ctx, BitReadCtx_t *gb,
2257 AV1RawTileList *current)
2258 {
2259 RK_S32 err;
2260 (void)ctx;
2261 fb(8, output_frame_width_in_tiles_minus_1);
2262 fb(8, output_frame_height_in_tiles_minus_1);
2263
2264 fb(16, tile_count_minus_1);
2265
2266 // Tile data follows.
2267
2268 return 0;
2269 }
2270
2271 static RK_S32 mpp_av1_metadata_hdr_cll(AV1Context *ctx, BitReadCtx_t *gb,
2272 AV1RawMetadataHDRCLL *current)
2273 {
2274 RK_S32 err;
2275 (void)ctx;
2276 fb(16, max_cll);
2277 fb(16, max_fall);
2278
2279 return 0;
2280 }
2281
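/*
 * HDR mastering display colour volume metadata. The chromaticity coordinates
 * are 0.16 fixed-point CIE 1931 x/y values; luminance_max is 24.8 fixed point
 * and luminance_min is 18.14 fixed point, both in cd/m^2.
 */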
2282 static RK_S32 mpp_av1_metadata_hdr_mdcv(AV1Context *ctx, BitReadCtx_t *gb,
2283 AV1RawMetadataHDRMDCV *current)
2284 {
2285 RK_S32 err, i;
2286 (void)ctx;
2287 for (i = 0; i < 3; i++) {
2288 fbs(16, primary_chromaticity_x[i], 1, i);
2289 fbs(16, primary_chromaticity_y[i], 1, i);
2290 }
2291
2292 fb(16, white_point_chromaticity_x);
2293 fb(16, white_point_chromaticity_y);
2294
2295 fc(32, luminance_max, 1, MAX_UINT_BITS(32));
2296 // luminance_min must be lower than luminance_max. Convert luminance_max from
2297 // 24.8 fixed point to 18.14 fixed point in order to compare them.
2298 fc(32, luminance_min, 0, MPP_MIN(((RK_U64)current->luminance_max << 6) - 1,
2299 MAX_UINT_BITS(32)));
2300
2301 return 0;
2302 }
2303
2304 static RK_S32 mpp_av1_scalability_structure(AV1Context *ctx, BitReadCtx_t *gb,
2305 AV1RawMetadataScalability *current)
2306 {
2307 const AV1RawSequenceHeader *seq;
2308 RK_S32 err, i, j;
2309
2310 if (!ctx->sequence_header) {
2311 mpp_err_f("No sequence header available: "
2312 "unable to parse scalability metadata.\n");
2313 return MPP_ERR_UNKNOW;
2314 }
2315 seq = ctx->sequence_header;
2316
2317 fb(2, spatial_layers_cnt_minus_1);
2318 flag(spatial_layer_dimensions_present_flag);
2319 flag(spatial_layer_description_present_flag);
2320 flag(temporal_group_description_present_flag);
2321 fc(3, scalability_structure_reserved_3bits, 0, 0);
2322 if (current->spatial_layer_dimensions_present_flag) {
2323 for (i = 0; i <= current->spatial_layers_cnt_minus_1; i++) {
2324 fcs(16, spatial_layer_max_width[i],
2325 0, seq->max_frame_width_minus_1 + 1, 1, i);
2326 fcs(16, spatial_layer_max_height[i],
2327 0, seq->max_frame_height_minus_1 + 1, 1, i);
2328 }
2329 }
2330 if (current->spatial_layer_description_present_flag) {
2331 for (i = 0; i <= current->spatial_layers_cnt_minus_1; i++)
2332 fbs(8, spatial_layer_ref_id[i], 1, i);
2333 }
2334 if (current->temporal_group_description_present_flag) {
2335 fb(8, temporal_group_size);
2336 for (i = 0; i < current->temporal_group_size; i++) {
2337 fbs(3, temporal_group_temporal_id[i], 1, i);
2338 flags(temporal_group_temporal_switching_up_point_flag[i], 1, i);
2339 flags(temporal_group_spatial_switching_up_point_flag[i], 1, i);
2340 fbs(3, temporal_group_ref_cnt[i], 1, i);
2341 for (j = 0; j < current->temporal_group_ref_cnt[i]; j++) {
2342 fbs(8, temporal_group_ref_pic_diff[i][j], 2, i, j);
2343 }
2344 }
2345 }
2346
2347 return 0;
2348 }
2349
2350 static RK_S32 mpp_av1_metadata_scalability(AV1Context *ctx, BitReadCtx_t *gb,
2351 AV1RawMetadataScalability *current)
2352 {
2353 RK_S32 err;
2354
2355 fb(8, scalability_mode_idc);
2356
2357 if (current->scalability_mode_idc == AV1_SCALABILITY_SS)
2358 CHECK(mpp_av1_scalability_structure(ctx, gb, current));
2359
2360 return 0;
2361 }
2362
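/*
 * Extract a Dolby Vision RPU carried in ITU-T T.35 metadata. The fixed-size
 * emdf container and payload config fields are skipped, the variable-length
 * payload size is decoded, and the raw payload bytes are copied into
 * ctx->hdr_dynamic_meta for the downstream HDR metadata path.
 */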
2363 static RK_S32 mpp_av1_get_dolby_rpu(AV1Context *ctx, BitReadCtx_t *gb)
2364 {
2365 MppFrameHdrDynamicMeta *hdr_dynamic_meta = ctx->hdr_dynamic_meta;
2366 RK_U32 emdf_payload_size = 0;
2367
2368 /* skip emdf_container{} */
2369 SKIP_BITS(gb, 3);
2370 SKIP_BITS(gb, 2);
2371 SKIP_BITS(gb, 5);
2372 SKIP_BITS(gb, 5);
2373 SKIP_BITS(gb, 1);
2374 SKIP_BITS(gb, 5);
2375 SKIP_BITS(gb, 1);
2376 /* skip emdf_payload_config{} */
2377 SKIP_BITS(gb, 5);
2378
2379 /* get payload size */
2380 #define VARIABLE_BITS8(gb, value) \
2381 for (;;) { \
2382 RK_U32 tmp, flag; \
2383 \
2384 READ_BITS(gb, 8, &tmp); \
2385 value += tmp; \
2386 READ_ONEBIT(gb, &flag); \
2387 if (!flag) break; \
2388 value <<= 8; \
2389 value += (1 << 8); \
2390 }
2391
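/* Each iteration reads 8 payload bits plus a 1-bit extension flag; while the
 * flag is set, the running value is shifted up by 8 and biased by 256 before
 * the next byte is accumulated. */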
2392 VARIABLE_BITS8(gb, emdf_payload_size);
2393 if (!hdr_dynamic_meta) {
2394 hdr_dynamic_meta = mpp_calloc_size(MppFrameHdrDynamicMeta,
2395 sizeof(MppFrameHdrDynamicMeta) + SZ_1K);
2396 if (!hdr_dynamic_meta) {
2397 mpp_err_f("malloc hdr dynamic data failed!\n");
2398 return MPP_ERR_NOMEM;
2399 }
2400 }
2401
2402 RK_U32 i;
2403 MppWriteCtx bit_ctx;
2404
2405 mpp_writer_init(&bit_ctx, hdr_dynamic_meta->data, SZ_1K);
2406
2407 mpp_writer_put_raw_bits(&bit_ctx, 0, 24);
2408 mpp_writer_put_raw_bits(&bit_ctx, 1, 8);
2409 mpp_writer_put_raw_bits(&bit_ctx, 0x19, 8);
2410 for (i = 0; i < emdf_payload_size; i++) {
2411 RK_U8 data;
2412
2413 READ_BITS(gb, 8, &data);
2414 mpp_writer_put_bits(&bit_ctx, data, 8);
2415 }
2416
2417 hdr_dynamic_meta->size = mpp_writer_bytes(&bit_ctx);
2418 hdr_dynamic_meta->hdr_fmt = DOLBY;
2419 av1d_dbg(AV1D_DBG_STRMIN, "dolby rpu size %d -> %d\n",
2420 emdf_payload_size, hdr_dynamic_meta->size);
2421
2422 ctx->hdr_dynamic_meta = hdr_dynamic_meta;
2423 ctx->hdr_dynamic = 1;
2424 ctx->is_hdr = 1;
2425
2426 if (av1d_debug & AV1D_DBG_DUMP_RPU) {
2427 RK_U8 *p = hdr_dynamic_meta->data;
2428 char fname[128];
2429 FILE *fp_in = NULL;
2430 static RK_U32 g_frame_no = 0;
2431
2432 sprintf(fname, "/data/video/meta_%d.txt", g_frame_no++);
2433 fp_in = fopen(fname, "wb");
2434 mpp_err("open %s %p\n", fname, fp_in);
2435 if (fp_in)
2436 fwrite(p, 1, hdr_dynamic_meta->size, fp_in);
2437 if (fp_in)
2438 fclose(fp_in);
2439 }
2440
2441 return 0;
2442
2443 __BITREAD_ERR:
2444 return MPP_ERR_STREAM;
2445 }
2446
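/*
 * ITU-T T.35 metadata. Only the country/provider codes are parsed here; a
 * terminal provider code of 0x3B with provider oriented code 0x800 is treated
 * as a Dolby Vision RPU and handed to mpp_av1_get_dolby_rpu().
 */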
2447 static RK_S32 mpp_av1_metadata_itut_t35(AV1Context *ctx, BitReadCtx_t *gb,
2448 AV1RawMetadataITUTT35 *current)
2449 {
2450 RK_S32 err;
2451
2452 fb(8, itu_t_t35_country_code);
2453 if (current->itu_t_t35_country_code == 0xff)
2454 fb(8, itu_t_t35_country_code_extension_byte);
2455
2456 current->payload_size = mpp_get_bits_left(gb) / 8 - 1;
2457
2458 av1d_dbg(AV1D_DBG_STRMIN, "%s itu_t_t35_country_code %d payload_size %d\n",
2459 __func__, current->itu_t_t35_country_code, current->payload_size);
2460
2461 fb(16, itu_t_t35_terminal_provider_code);
2462 READ_BITS_LONG(gb, 32, &current->itu_t_t35_terminal_provider_oriented_code);
2463
2464 av1d_dbg(AV1D_DBG_STRMIN, "itu_t_t35_country_code 0x%x\n",
2465 current->itu_t_t35_country_code);
2466 av1d_dbg(AV1D_DBG_STRMIN, "itu_t_t35_terminal_provider_code 0x%x\n",
2467 current->itu_t_t35_terminal_provider_code);
2468 av1d_dbg(AV1D_DBG_STRMIN, "itu_t_t35_terminal_provider_oriented_code 0x%x\n",
2469 current->itu_t_t35_terminal_provider_oriented_code);
2470
2471 if (current->itu_t_t35_terminal_provider_code == 0x3B &&
2472 current->itu_t_t35_terminal_provider_oriented_code == 0x800)
2473 mpp_av1_get_dolby_rpu(ctx, gb);
2474
2475 return 0;
2476 __BITREAD_ERR:
2477 return 0;
2478 }
2479
2480 static RK_S32 mpp_av1_metadata_timecode(AV1Context *ctx, BitReadCtx_t *gb,
2481 AV1RawMetadataTimecode *current)
2482 {
2483 RK_S32 err;
2484 (void)ctx;
2485
2486 fb(5, counting_type);
2487 flag(full_timestamp_flag);
2488 flag(discontinuity_flag);
2489 flag(cnt_dropped_flag);
2490 fb(9, n_frames);
2491
2492 if (current->full_timestamp_flag) {
2493 fc(6, seconds_value, 0, 59);
2494 fc(6, minutes_value, 0, 59);
2495 fc(5, hours_value, 0, 23);
2496 } else {
2497 flag(seconds_flag);
2498 if (current->seconds_flag) {
2499 fc(6, seconds_value, 0, 59);
2500 flag(minutes_flag);
2501 if (current->minutes_flag) {
2502 fc(6, minutes_value, 0, 59);
2503 flag(hours_flag);
2504 if (current->hours_flag)
2505 fc(5, hours_value, 0, 23);
2506 }
2507 }
2508 }
2509
2510 fb(5, time_offset_length);
2511 if (current->time_offset_length > 0)
2512 fb(current->time_offset_length, time_offset_value);
2513 else
2514 infer(time_offset_value, 0);
2515
2516 return 0;
2517 }
2518
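/*
 * Metadata OBU: read the leb128 metadata_type and dispatch to the matching
 * parser. Unknown metadata types are ignored rather than treated as errors.
 */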
2519 static RK_S32 mpp_av1_metadata_obu(AV1Context *ctx, BitReadCtx_t *gb,
2520 AV1RawMetadata *current)
2521 {
2522 RK_S32 err;
2523
2524 leb128(metadata_type);
2525 av1d_dbg(AV1D_DBG_STRMIN, "%s meta type %d\n", __func__, current->metadata_type);
2526 switch (current->metadata_type) {
2527 case AV1_METADATA_TYPE_HDR_CLL:
2528 CHECK(mpp_av1_metadata_hdr_cll(ctx, gb, &current->metadata.hdr_cll));
2529 break;
2530 case AV1_METADATA_TYPE_HDR_MDCV:
2531 CHECK(mpp_av1_metadata_hdr_mdcv(ctx, gb, &current->metadata.hdr_mdcv));
2532 break;
2533 case AV1_METADATA_TYPE_SCALABILITY:
2534 CHECK(mpp_av1_metadata_scalability(ctx, gb, &current->metadata.scalability));
2535 break;
2536 case AV1_METADATA_TYPE_ITUT_T35:
2537 CHECK(mpp_av1_metadata_itut_t35(ctx, gb, &current->metadata.itut_t35));
2538 break;
2539 case AV1_METADATA_TYPE_TIMECODE:
2540 CHECK(mpp_av1_metadata_timecode(ctx, gb, &current->metadata.timecode));
2541 break;
2542 default:
2543 // Unknown metadata type.
2544 return MPP_OK;
2545 }
2546
2547 return 0;
2548 }
2549
2550 static RK_S32 mpp_av1_padding_obu(AV1Context *ctx, BitReadCtx_t *gb,
2551 AV1RawPadding *current)
2552 {
2553 RK_S32 err;
2554 RK_U32 i;
2555 (void)ctx;
2556 current->payload_size = mpp_av1_get_payload_bytes_left(gb);
2557
2558 current->payload = mpp_malloc(RK_U8, current->payload_size);
2559 if (!current->payload )
2560 return MPP_ERR_NOMEM;
2561
2562 for (i = 0; i < current->payload_size; i++)
2563 xf(8, obu_padding_byte[i], current->payload[i], 0x00, 0xff, 1, i);
2564
2565 return 0;
2566 }
2567
2568
2569
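/*
 * Open a hole at 'position' in the fragment's unit array, growing the array
 * geometrically (2 * allocated + 1) when it is full. The new slot is zeroed
 * and nb_units is bumped; the caller fills in the unit afterwards.
 */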
2570 static MPP_RET mpp_insert_unit(Av1UnitFragment *frag, RK_S32 position)
2571 {
2572 Av1ObuUnit *units;
2573
2574 if (frag->nb_units < frag->nb_units_allocated) {
2575 units = frag->units;
2576
2577 if (position < frag->nb_units)
2578 memmove(units + position + 1, units + position,
2579 (frag->nb_units - position) * sizeof(*units));
2580 } else {
2581 units = mpp_malloc(Av1ObuUnit, frag->nb_units * 2 + 1);
2582 if (!units)
2583 return MPP_ERR_NOMEM;
2584
2585 frag->nb_units_allocated = 2 * frag->nb_units_allocated + 1;
2586
2587 if (position > 0)
2588 memcpy(units, frag->units, position * sizeof(*units));
2589
2590 if (position < frag->nb_units)
2591 memcpy(units + position + 1, frag->units + position,
2592 (frag->nb_units - position) * sizeof(*units));
2593 }
2594
2595 memset(units + position, 0, sizeof(*units));
2596
2597 if (units != frag->units) {
2598 mpp_free(frag->units);
2599 frag->units = units;
2600 }
2601
2602 ++frag->nb_units;
2603
2604 return MPP_OK;
2605 }
2606
2607 static MPP_RET mpp_insert_unit_data(Av1UnitFragment *frag,
2608 RK_S32 position,
2609 Av1UnitType type,
2610 RK_U8 *data, size_t data_size)
2611 {
2612 Av1ObuUnit *unit;
2613 MPP_RET ret;
2614
2615 if (position == -1)
2616 position = frag->nb_units;
2617
2618 mpp_assert(position >= 0 && position <= frag->nb_units);
2619 ret = mpp_insert_unit(frag, position);
2620 if (ret < 0) {
2621 return ret;
2622 }
2623
2624 unit = &frag->units[position];
2625 unit->type = type;
2626 unit->data = data;
2627 unit->data_size = data_size;
2628
2629 return MPP_OK;
2630 }
2631
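/*
 * Split a fragment buffer into one unit per OBU. When header_flag is set and
 * the first bit of the data is 1, the buffer is treated as an MP4/Matroska
 * AV1CodecConfigurationRecord and its 4-byte record header is skipped before
 * the OBUs are scanned. Each OBU's length comes from its leb128 size field,
 * or from the remaining fragment size when obu_has_size_field is 0.
 */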
2632 RK_S32 mpp_av1_split_fragment(AV1Context *ctx, Av1UnitFragment *frag, RK_S32 header_flag)
2633 {
2634 BitReadCtx_t gbc;
2635 RK_U8 *data;
2636 size_t size;
2637 RK_U64 obu_length;
2638 RK_S32 pos, err;
2639
2640 data = frag->data;
2641 size = frag->data_size;
2642
2643 if (INT_MAX / 8 < size) {
2644 mpp_err( "Invalid fragment: "
2645 "too large (%d bytes).\n", size);
2646 err = MPP_NOK;
2647 goto fail;
2648 }
2649
2650 if (header_flag && size && data[0] & 0x80) {
2651 // The first bit is nonzero, so the extradata does not consist purely
2652 // of OBUs. Expect an MP4/Matroska AV1CodecConfigurationRecord.
2653 RK_S32 config_record_version = data[0] & 0x7f;
2654
2655 if (config_record_version != 1) {
2656 mpp_err(
2657 "Unknown version %d of AV1CodecConfigurationRecord "
2658 "found!\n",
2659 config_record_version);
2660 err = MPP_NOK;
2661 goto fail;
2662 }
2663
2664 if (size <= 4) {
2665 if (size < 4) {
2666 av1d_dbg(AV1D_DBG_STRMIN,
2667 "Undersized AV1CodecConfigurationRecord v%d found!\n",
2668 config_record_version);
2669 err = MPP_NOK;
2670 goto fail;
2671 }
2672
2673 goto success;
2674 }
2675
2676 // In AV1CodecConfigurationRecord v1, actual OBUs start after
2677 // four bytes. Thus set the offset as required for properly
2678 // parsing them.
2679 data += 4;
2680 size -= 4;
2681 }
2682
2683 while (size > 0) {
2684 AV1RawOBUHeader header;
2685 RK_U64 obu_size = 0;
2686
2687 mpp_set_bitread_ctx(&gbc, data, size);
2688
2689 err = mpp_av1_read_obu_header(ctx, &gbc, &header);
2690 if (err < 0)
2691 goto fail;
2692
2693 if (header.obu_has_size_field) {
2694 if (mpp_get_bits_left(&gbc) < 8) {
2695 mpp_err( "Invalid OBU: fragment "
2696 "too short (%d bytes).\n", size);
2697 err = MPP_NOK;
2698 goto fail;
2699 }
2700 err = mpp_av1_read_leb128(&gbc, &obu_size);
2701 if (err < 0)
2702 goto fail;
2703 } else
2704 obu_size = size - 1 - header.obu_extension_flag;
2705
2706 pos = mpp_get_bits_count(&gbc);
2707
2708 mpp_assert(pos % 8 == 0 && pos / 8 <= (RK_S32)size);
2709 obu_length = pos / 8 + obu_size;
2710
2711 if (size < obu_length) {
2712 mpp_err( "Invalid OBU length: "
2713 "%lld, but only %d bytes remaining in fragment.\n",
2714 obu_length, size);
2715 err = MPP_NOK;
2716 goto fail;
2717 }
2718 err = mpp_insert_unit_data(frag, -1, header.obu_type,
2719 data, obu_length);
2720 if (err < 0)
2721 goto fail;
2722
2723 data += obu_length;
2724 size -= obu_length;
2725 }
2726
2727 success:
2728 err = 0;
2729 fail:
2730 return err;
2731 }
2732
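/*
 * Record the byte-aligned tile data that follows the already-parsed headers:
 * only the offset, pointer and size within the unit are stored, the bytes
 * themselves are not copied.
 */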
2733 static RK_S32 mpp_av1_ref_tile_data(Av1ObuUnit *unit,
2734 BitReadCtx_t *gbc,
2735 AV1RawTileData *td)
2736 {
2737 RK_S32 pos;
2738
2739 pos = mpp_get_bits_count(gbc);
2740 if (pos >= (RK_S32)(8 * unit->data_size)) {
2741 mpp_err( "Bitstream ended before "
2742 "any data in tile group (%d bits read).\n", pos);
2743 return MPP_NOK;
2744 }
2745 // Must be byte-aligned at this point.
2746 mpp_assert(pos % 8 == 0);
2747
2748 td->offset = pos / 8;
2749 td->data = unit->data + pos / 8;
2750 td->data_size = unit->data_size - pos / 8;
2751
2752 return 0;
2753 }
2754
2755 static MPP_RET mpp_av1_alloc_unit_content(Av1ObuUnit *unit)
2756 {
2757 (void)unit;
2758 MPP_FREE(unit->content);
2759 unit->content = mpp_calloc(AV1RawOBU, 1);
2760 if (!unit->content) {
2761 return MPP_ERR_NOMEM; // drop_obu()
2762 }
2763 return MPP_OK;
2764 }
2765
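/*
 * Parse a single OBU unit: read the OBU header and optional leb128 size,
 * drop OBUs that do not belong to the selected operating point (other than
 * sequence headers and temporal delimiters), then parse the type-specific
 * payload and check the trailing bits where the OBU size allows it.
 */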
2766 MPP_RET mpp_av1_read_unit(AV1Context *ctx, Av1ObuUnit *unit)
2767 {
2768 AV1RawOBU *obu;
2769 BitReadCtx_t gbc;
2770 RK_S32 err = 0, start_pos, end_pos, hdr_start_pos;
2771
2772 err = mpp_av1_alloc_unit_content(unit);
2773
2774 if (err < 0)
2775 return err;
2776
2777 obu = unit->content;
2778
2779 mpp_set_bitread_ctx(&gbc, unit->data, unit->data_size);
2780
2781 hdr_start_pos = mpp_get_bits_count(&gbc);
2782
2783 err = mpp_av1_read_obu_header(ctx, &gbc, &obu->header);
2784 if (err < 0)
2785 return err;
2786 mpp_assert(obu->header.obu_type == unit->type);
2787
2788 if (obu->header.obu_has_size_field) {
2789 RK_U64 obu_size = 0;
2790 err = mpp_av1_read_leb128(&gbc, &obu_size);
2791 if (err < 0)
2792 return err;
2793 obu->obu_size = obu_size;
2794 } else {
2795 if (unit->data_size < (RK_U32)(1 + obu->header.obu_extension_flag)) {
2796 mpp_err( "Invalid OBU length: "
2797 "unit too short (%d).\n", unit->data_size);
2798 return MPP_NOK;
2799 }
2800 obu->obu_size = unit->data_size - 1 - obu->header.obu_extension_flag;
2801 }
2802
2803 start_pos = mpp_get_bits_count(&gbc);
2804 if (!ctx->fist_tile_group)
2805 ctx->frame_tag_size += ((start_pos - hdr_start_pos + 7) >> 3);
2806 if (obu->header.obu_extension_flag) {
2807 if (obu->header.obu_type != AV1_OBU_SEQUENCE_HEADER &&
2808 obu->header.obu_type != AV1_OBU_TEMPORAL_DELIMITER &&
2809 ctx->operating_point_idc) {
2810 RK_S32 in_temporal_layer =
2811 (ctx->operating_point_idc >> ctx->temporal_id ) & 1;
2812 RK_S32 in_spatial_layer =
2813 (ctx->operating_point_idc >> (ctx->spatial_id + 8)) & 1;
2814 if (!in_temporal_layer || !in_spatial_layer) {
2815 return MPP_ERR_PROTOL; // drop_obu()
2816 }
2817 }
2818 }
2819 av1d_dbg(AV1D_DBG_HEADER, "obu type %d size %d\n",
2820 obu->header.obu_type, obu->obu_size);
2821 switch (obu->header.obu_type) {
2822 case AV1_OBU_SEQUENCE_HEADER: {
2823 err = mpp_av1_sequence_header_obu(ctx, &gbc,
2824 &obu->obu.sequence_header);
2825 if (err < 0)
2826 return err;
2827 ctx->frame_tag_size += obu->obu_size;
2828 if (ctx->operating_point >= 0) {
2829 AV1RawSequenceHeader *sequence_header = &obu->obu.sequence_header;
2830
2831 if (ctx->operating_point > sequence_header->operating_points_cnt_minus_1) {
2832 mpp_err("Invalid Operating Point %d requested. "
2833 "Must not be higher than %u.\n",
2834 ctx->operating_point, sequence_header->operating_points_cnt_minus_1);
2835 return MPP_ERR_PROTOL;
2836 }
2837 ctx->operating_point_idc = sequence_header->operating_point_idc[ctx->operating_point];
2838 }
2839
2840 ctx->sequence_header = NULL;
2841 ctx->sequence_header = &obu->obu.sequence_header;
2842 } break;
2843 case AV1_OBU_TEMPORAL_DELIMITER: {
2844 err = mpp_av1_temporal_delimiter_obu(ctx, &gbc);
2845 if (err < 0)
2846 return err;
2847 } break;
2848 case AV1_OBU_FRAME_HEADER:
2849 case AV1_OBU_REDUNDANT_FRAME_HEADER: {
2850 err = mpp_av1_frame_header_obu(ctx, &gbc,
2851 &obu->obu.frame_header,
2852 obu->header.obu_type ==
2853 AV1_OBU_REDUNDANT_FRAME_HEADER,
2854 NULL);
2855 if (err < 0)
2856 return err;
2857 ctx->frame_tag_size += obu->obu_size;
2858 } break;
2859 case AV1_OBU_TILE_GROUP: {
2860 RK_U32 cur_pos = mpp_get_bits_count(&gbc);
2861
2862 err = mpp_av1_tile_group_obu(ctx, &gbc, &obu->obu.tile_group);
2863 if (err < 0)
2864 return err;
2865 if (!ctx->fist_tile_group)
2866 ctx->frame_tag_size += MPP_ALIGN(mpp_get_bits_count(&gbc) - cur_pos, 8) / 8;
2867 ctx->fist_tile_group = 1;
2868 err = mpp_av1_ref_tile_data(unit, &gbc,
2869 &obu->obu.tile_group.tile_data);
2870 if (err < 0)
2871 return err;
2872 } break;
2873 case AV1_OBU_FRAME: {
2874 err = mpp_av1_frame_obu(ctx, &gbc, &obu->obu.frame,
2875 NULL);
2876 if (err < 0)
2877 return err;
2878
2879 err = mpp_av1_ref_tile_data(unit, &gbc,
2880 &obu->obu.frame.tile_group.tile_data);
2881 if (err < 0)
2882 return err;
2883 } break;
2884 case AV1_OBU_TILE_LIST: {
2885 err = mpp_av1_tile_list_obu(ctx, &gbc, &obu->obu.tile_list);
2886 if (err < 0)
2887 return err;
2888
2889 err = mpp_av1_ref_tile_data(unit, &gbc,
2890 &obu->obu.tile_list.tile_data);
2891 if (err < 0)
2892 return err;
2893 } break;
2894 case AV1_OBU_METADATA: {
2895 ctx->frame_tag_size += obu->obu_size;
2896 err = mpp_av1_metadata_obu(ctx, &gbc, &obu->obu.metadata);
2897 if (err < 0)
2898 return err;
2899 } break;
2900 case AV1_OBU_PADDING: {
2901 err = mpp_av1_padding_obu(ctx, &gbc, &obu->obu.padding);
2902 if (err < 0)
2903 return err;
2904 } break;
2905 default:
2906 return MPP_ERR_VALUE;
2907 }
2908
2909 end_pos = mpp_get_bits_count(&gbc);
2910 mpp_assert(end_pos <= (RK_S32)(unit->data_size * 8));
2911
2912 if (obu->obu_size > 0 &&
2913 obu->header.obu_type != AV1_OBU_TILE_GROUP &&
2914 obu->header.obu_type != AV1_OBU_TILE_LIST &&
2915 obu->header.obu_type != AV1_OBU_FRAME) {
2916 RK_S32 nb_bits = obu->obu_size * 8 + start_pos - end_pos;
2917
2918 if (nb_bits <= 0)
2919 return MPP_NOK;
2920
2921 err = mpp_av1_trailing_bits(ctx, &gbc, nb_bits);
2922 if (err < 0)
2923 return err;
2924 }
2925
2926 return 0;
2927 }
2928
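/*
 * Parse every unit of a fragment. When ctx->unit_types is set, only the
 * listed OBU types are decomposed; units that fail with MPP_ERR_PROTOL are
 * skipped, any other error aborts the fragment.
 */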
2929 RK_S32 mpp_av1_read_fragment_content(AV1Context *ctx, Av1UnitFragment *frag)
2930 {
2931 int err, i, j;
2932 AV1RawOBU *obu;
2933
2934 ctx->frame_tag_size = 0;
2935 ctx->fist_tile_group = 0;
2936 for (i = 0; i < frag->nb_units; i++) {
2937 Av1ObuUnit *unit = &frag->units[i];
2938 if (ctx->unit_types) {
2939 for (j = 0; j < ctx->nb_unit_types; j++) {
2940 if (ctx->unit_types[j] == unit->type)
2941 break;
2942 }
2943 if (j >= ctx->nb_unit_types)
2944 continue;
2945 }
2946 MPP_FREE(unit->content);
2947 mpp_assert(unit->data);
2948 err = mpp_av1_read_unit(ctx, unit);
2949
2950 if (err == MPP_ERR_VALUE) {
2951 mpp_err_f("Decomposition unimplemented for unit %d "
2952 "(type %d).\n", i, unit->type);
2953 } else if (err == MPP_ERR_PROTOL) {
2954 mpp_err_f("Skipping decomposition of"
2955 "unit %d (type %d).\n", i, unit->type);
2956 MPP_FREE(unit->content);
2957 unit->content = NULL;
2958 } else if (err < 0) {
2959 mpp_err_f("Failed to read unit %d (type %d).\n", i, unit->type);
2960 return err;
2961 }
2962 obu = unit->content;
2963 if (obu)
2964 av1d_dbg(AV1D_DBG_HEADER, "obu->header.obu_type %d, obu->obu_size = %d ctx->frame_tag_size %d", obu->header.obu_type, obu->obu_size, ctx->frame_tag_size);
2965 }
2966 return 0;
2967 }
2968
2969 int mpp_av1_set_context_with_sequence(Av1CodecContext *ctx,
2970 const AV1RawSequenceHeader *seq)
2971 {
2972 int width = seq->max_frame_width_minus_1 + 1;
2973 int height = seq->max_frame_height_minus_1 + 1;
2974
2975 ctx->profile = seq->seq_profile;
2976 ctx->level = seq->seq_level_idx[0];
2977
2978 ctx->color_range =
2979 seq->color_config.color_range ? MPP_FRAME_RANGE_JPEG : MPP_FRAME_RANGE_MPEG;
2980 ctx->color_primaries = seq->color_config.color_primaries;
2981 ctx->colorspace = seq->color_config.color_primaries;
2982 ctx->color_trc = seq->color_config.transfer_characteristics;
2983
2984 switch (seq->color_config.chroma_sample_position) {
2985 case AV1_CSP_VERTICAL:
2986 ctx->chroma_sample_location = MPP_CHROMA_LOC_LEFT;
2987 break;
2988 case AV1_CSP_COLOCATED:
2989 ctx->chroma_sample_location = MPP_CHROMA_LOC_TOPLEFT;
2990 break;
2991 }
2992
2993 if (ctx->width != width || ctx->height != height) {
2994 ctx->width = width;
2995 ctx->height = height;
2996 }
2997 return 0;
2998 }
2999
3000 void mpp_av1_fragment_reset(Av1UnitFragment *frag)
3001 {
3002 int i;
3003
3004 for (i = 0; i < frag->nb_units; i++) {
3005 Av1ObuUnit *unit = &frag->units[i];
3006 MPP_FREE(unit->content);
3007 unit->data = NULL;
3008 unit->data_size = 0;
3009 }
3010 frag->nb_units = 0;
3011 frag->data = NULL;
3012 frag->data_size = 0;
3013 }
3014
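/*
 * Re-assemble a fragment by concatenating the data of all units into a single
 * buffer with zeroed padding at the end.
 */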
3015 RK_S32 mpp_av1_assemble_fragment(AV1Context *ctx, Av1UnitFragment *frag)
3016 {
3017 size_t size, pos;
3018 RK_S32 i;
3019 (void)ctx;
3020 size = 0;
3021 for (i = 0; i < frag->nb_units; i++)
3022 size += frag->units[i].data_size;
3023
3024 frag->data = mpp_malloc(RK_U8, size + BUFFER_PADDING_SIZE);
3025 if (!frag->data)
3026 return MPP_ERR_NOMEM;
3027
3028 memset(frag->data + size, 0, BUFFER_PADDING_SIZE);
3029
3030 pos = 0;
3031 for (i = 0; i < frag->nb_units; i++) {
3032 memcpy(frag->data + pos, frag->units[i].data,
3033 frag->units[i].data_size);
3034 pos += frag->units[i].data_size;
3035 }
3036 mpp_assert(pos == size);
3037 frag->data_size = size;
3038
3039 return 0;
3040 }
3041
3042 void mpp_av1_flush(AV1Context *ctx)
3043 {
3044 // ctx->sequence_header = NULL;
3045 // ctx->frame_header = NULL;
3046
3047 memset(ctx->ref_s, 0, sizeof(ctx->ref_s));
3048 ctx->operating_point_idc = 0;
3049 ctx->seen_frame_header = 0;
3050 ctx->tile_num = 0;
3051 }
3052
3053 void mpp_av1_close(AV1Context *ctx)
3054 {
3055 MPP_FREE(ctx->frame_header);
3056 MPP_FREE(ctx->sequence_header);
3057 MPP_FREE(ctx->raw_frame_header);
3058 }
3059
3060 void mpp_av1_free_metadata(void *unit, RK_U8 *content)
3061 {
3062 AV1RawOBU *obu = (AV1RawOBU*)content;
3063 (void)unit;
3064 mpp_assert(obu->header.obu_type == AV1_OBU_METADATA);
3065 MPP_FREE(content);
3066 }
3067