@@ -82,6 +82,8 @@ typedef struct HEVCDecoderConfigurationRecord {
82
82
uint8_t lengthSizeMinusOne ;
83
83
uint8_t numOfArrays ;
84
84
HVCCNALUnitArray arrays [NB_ARRAYS ];
85
+
86
+ uint8_t alpha_layer_nuh_id ;
85
87
} HEVCDecoderConfigurationRecord ;
86
88
87
89
typedef struct HVCCProfileTierLevel {
@@ -149,20 +151,24 @@ static void hvcc_update_ptl(HEVCDecoderConfigurationRecord *hvcc,
149
151
150
152
static void hvcc_parse_ptl (GetBitContext * gb ,
151
153
HEVCDecoderConfigurationRecord * hvcc ,
154
+ int profile_present_flag ,
152
155
unsigned int max_sub_layers_minus1 )
153
156
{
154
157
unsigned int i ;
155
158
HVCCProfileTierLevel general_ptl ;
156
159
uint8_t sub_layer_profile_present_flag [HEVC_MAX_SUB_LAYERS ];
157
160
uint8_t sub_layer_level_present_flag [HEVC_MAX_SUB_LAYERS ];
158
161
159
- general_ptl .profile_space = get_bits (gb , 2 );
160
- general_ptl .tier_flag = get_bits1 (gb );
161
- general_ptl .profile_idc = get_bits (gb , 5 );
162
- general_ptl .profile_compatibility_flags = get_bits_long (gb , 32 );
163
- general_ptl .constraint_indicator_flags = get_bits64 (gb , 48 );
164
- general_ptl .level_idc = get_bits (gb , 8 );
165
- hvcc_update_ptl (hvcc , & general_ptl );
162
+ if (profile_present_flag ) {
163
+ general_ptl .profile_space = get_bits (gb , 2 );
164
+ general_ptl .tier_flag = get_bits1 (gb );
165
+ general_ptl .profile_idc = get_bits (gb , 5 );
166
+ general_ptl .profile_compatibility_flags = get_bits_long (gb , 32 );
167
+ general_ptl .constraint_indicator_flags = get_bits64 (gb , 48 );
168
+ general_ptl .level_idc = get_bits (gb , 8 );
169
+ hvcc_update_ptl (hvcc , & general_ptl );
170
+ } else
171
+ skip_bits (gb , 8 ); // general_level_idc
166
172
167
173
for (i = 0 ; i < max_sub_layers_minus1 ; i ++ ) {
168
174
sub_layer_profile_present_flag [i ] = get_bits1 (gb );
@@ -384,15 +390,86 @@ static void skip_sub_layer_ordering_info(GetBitContext *gb)
384
390
get_ue_golomb_long (gb ); // max_latency_increase_plus1
385
391
}
386
392
393
/*
 * Parse the vps_extension() syntax structure (Rec. ITU-T H.265, Annex F,
 * F.7.3.2.1.1) just far enough to discover whether one of the enhancement
 * layers is an alpha auxiliary layer, and if so record its nuh_layer_id in
 * hvcc->alpha_layer_nuh_id so NAL units of that layer can be kept when
 * building the hvcC box.
 *
 * Expects gb to be positioned right after the vps_extension alignment.
 * Returns 0 on success, a negative value when the scalability dimension
 * bit offsets are inconsistent (invalid bitstream).
 */
static int hvcc_parse_vps_extension(GetBitContext *gb, HVCCNALUnit *nal,
                                    HEVCDecoderConfigurationRecord *hvcc,
                                    uint8_t vps_max_layers_minus1,
                                    uint8_t vps_base_layer_internal_flag)
{
    uint8_t num_scalability_types = 0;
    /* Clamp to 62: layer_id_in_nuh[] has 64 entries and nuh_layer_id 63 is
     * reserved by the spec, so at most 62 enhancement layers are examined. */
    uint8_t max_layers_minus_1 = FFMIN(62, vps_max_layers_minus1);
    uint8_t splitting_flag, vps_nuh_layer_id_present_flag;
    uint8_t scalability_mask_flag[16] = { 0 };
    uint8_t dimension_id_len[16]      = { 0 };
    uint8_t layer_id_in_nuh[64]       = { 0 };
    int i, j;

    /* profile_tier_level(0, vps_max_sub_layers_minus1): present here with
     * profilePresentFlag = 0, i.e. only general_level_idc is coded. */
    if (vps_max_layers_minus1 > 0 && vps_base_layer_internal_flag)
        hvcc_parse_ptl(gb, hvcc, 0, nal->vps_max_sub_layers_minus1);

    splitting_flag = get_bits(gb, 1);

    /* scalability_mask_flag: record which scalability dimensions are in
     * use; dimension index 3 is AuxId (auxiliary pictures). */
    for (i = 0; i < 16; i++)
        if (get_bits(gb, 1))
            scalability_mask_flag[num_scalability_types++] = i;

    /* dimension_id_len_minus1[j]; when splitting_flag is set, the length
     * of the last dimension is inferred rather than coded. */
    for (j = 0; j < (num_scalability_types - splitting_flag); j++)
        dimension_id_len[j] = get_bits(gb, 3) + 1;

    vps_nuh_layer_id_present_flag = get_bits(gb, 1);

    for (i = 1; i <= max_layers_minus_1; i++) {
        if (vps_nuh_layer_id_present_flag)
            layer_id_in_nuh[i] = get_bits(gb, 6);
        else
            layer_id_in_nuh[i] = i;

        if (!splitting_flag) {
            /* dimension_id[i][j] is coded explicitly; an AuxId dimension
             * with value AUX_ALPHA marks layer i as the alpha layer. */
            for (j = 0; j < num_scalability_types; j++) {
                int dimension_id = get_bits(gb, dimension_id_len[j]);

                if (dimension_id == 1 /* AUX_ALPHA */ && scalability_mask_flag[j] == 3 /* AuxId */)
                    hvcc->alpha_layer_nuh_id = layer_id_in_nuh[i];
            }
        }
    }

    if (splitting_flag) {
        /* dimension_id[i][j] is inferred from bit fields of nuh_layer_id;
         * dim_bit_offset[j] is the LSB position of dimension j. */
        uint8_t dim_bit_offset[17] = { 0 };

        dim_bit_offset[0] = 0;
        for (j = 1; j < num_scalability_types; j++)
            dim_bit_offset[j] = dim_bit_offset[j - 1] + dimension_id_len[j - 1];
        /* The last dimension extends up to the top of the 6-bit nuh_layer_id. */
        dim_bit_offset[num_scalability_types] = 6;

        /* All coded offsets must fit inside the 6 bits of nuh_layer_id;
         * since the offsets are non-decreasing, this single check also
         * keeps every shift below well-defined (< 6 bits). */
        if (num_scalability_types > 0 && dim_bit_offset[num_scalability_types - 1] >= 6)
            return -1; // invalid bitstream

        for (i = 1; i <= max_layers_minus_1; i++) {
            for (j = 0; j < num_scalability_types; j++) {
                /* Extract bits [dim_bit_offset[j], dim_bit_offset[j+1]) of
                 * layer i's nuh_layer_id. */
                int dimension_id = (layer_id_in_nuh[i] & ((1 << dim_bit_offset[j + 1]) - 1)) >> dim_bit_offset[j];

                if (dimension_id == 1 /* AUX_ALPHA */ && scalability_mask_flag[j] == 3 /* AuxId */)
                    hvcc->alpha_layer_nuh_id = layer_id_in_nuh[i];
            }
        }
    }

    return 0;
}
459
+
387
460
static int hvcc_parse_vps (GetBitContext * gb , HVCCNALUnit * nal ,
388
461
HEVCDecoderConfigurationRecord * hvcc )
389
462
{
463
+ uint8_t vps_base_layer_internal_flag , vps_max_layers_minus1 ;
464
+ uint8_t vps_sub_layer_ordering_info_present_flag , vps_max_layer_id ;
465
+ int vps_num_layer_sets_minus1 ;
466
+ int i ;
467
+
390
468
nal -> parameter_set_id = get_bits (gb , 4 );
391
- /*
392
- * vps_reserved_three_2bits u(2)
393
- * vps_max_layers_minus1 u(6)
394
- */
395
- skip_bits (gb , 8 );
469
+
470
+ vps_base_layer_internal_flag = get_bits (gb , 1 );
471
+ skip_bits (gb , 1 ); // vps_base_layer_available_flag
472
+ vps_max_layers_minus1 = get_bits (gb , 6 );
396
473
397
474
nal -> vps_max_sub_layers_minus1 = get_bits (gb , 3 );
398
475
@@ -413,7 +490,50 @@ static int hvcc_parse_vps(GetBitContext *gb, HVCCNALUnit *nal,
413
490
*/
414
491
skip_bits (gb , 17 );
415
492
416
- hvcc_parse_ptl (gb , hvcc , nal -> vps_max_sub_layers_minus1 );
493
+ hvcc_parse_ptl (gb , hvcc , 1 , nal -> vps_max_sub_layers_minus1 );
494
+
495
+ vps_sub_layer_ordering_info_present_flag = get_bits (gb , 1 );
496
+ for (i = (vps_sub_layer_ordering_info_present_flag ? 0 : nal -> vps_max_sub_layers_minus1 );
497
+ i <= nal -> vps_max_sub_layers_minus1 ; i ++ ) {
498
+ get_ue_golomb (gb ); // vps_max_dec_pic_buffering_minus1
499
+ get_ue_golomb (gb ); // vps_max_num_reorder_pics
500
+ get_ue_golomb (gb ); // vps_max_latency_increase_plus1
501
+ }
502
+
503
+ vps_max_layer_id = get_bits (gb , 6 );
504
+ vps_num_layer_sets_minus1 = get_ue_golomb (gb );
505
+ skip_bits_long (gb , (vps_max_layer_id + 1 ) * vps_num_layer_sets_minus1 ); // layer_id_included_flag[i][j]
506
+
507
+ if (get_bits (gb , 1 )) { // vps_timing_info_present_flag
508
+ int vps_num_hrd_parameters ;
509
+
510
+ skip_bits_long (gb , 64 ); // vps_num_units_in_tick, vps_time_scale
511
+
512
+ if (get_bits (gb , 1 )) // vps_poc_proportional_to_timing_flag
513
+ get_ue_golomb (gb ); // vps_num_ticks_poc_diff_one_minus1
514
+
515
+ vps_num_hrd_parameters = get_ue_golomb (gb ); // vps_num_hrd_parameters
516
+
517
+ for (i = 0 ; i < vps_num_hrd_parameters ; i ++ ) {
518
+ int cprms_present_flag ;
519
+
520
+ get_ue_golomb (gb ); // hrd_layer_set_idx[i]
521
+ if (i > 0 )
522
+ cprms_present_flag = get_bits (gb , 1 );
523
+ else
524
+ cprms_present_flag = 1 ;
525
+
526
+ skip_hrd_parameters (gb , cprms_present_flag , nal -> vps_max_sub_layers_minus1 );
527
+ }
528
+ }
529
+
530
+ if (get_bits (gb , 1 )) { // vps_extension_flag
531
+ align_get_bits (gb );
532
+ if (hvcc_parse_vps_extension (gb , nal , hvcc ,
533
+ vps_max_layers_minus1 ,
534
+ vps_base_layer_internal_flag ) < 0 )
535
+ return 0 ;
536
+ }
417
537
418
538
/* nothing useful for hvcC past this point */
419
539
return 0 ;
@@ -551,7 +671,7 @@ static int hvcc_parse_sps(GetBitContext *gb, HVCCNALUnit *nal,
551
671
552
672
if (!multi_layer_ext_sps_flag ) {
553
673
hvcc -> temporalIdNested = get_bits1 (gb );
554
- hvcc_parse_ptl (gb , hvcc , sps_max_sub_layers_minus1 );
674
+ hvcc_parse_ptl (gb , hvcc , 1 , sps_max_sub_layers_minus1 );
555
675
}
556
676
557
677
nal -> parameter_set_id = get_ue_golomb_long (gb );
@@ -762,7 +882,7 @@ static int hvcc_add_nal_unit(const uint8_t *nal_buf, uint32_t nal_size,
762
882
goto end ;
763
883
764
884
nal_unit_parse_header (& gbc , & nal_type , & nuh_layer_id );
765
- if (!is_lhvc && nuh_layer_id > 0 )
885
+ if (!is_lhvc && nuh_layer_id > 0 && nuh_layer_id != hvcc -> alpha_layer_nuh_id )
766
886
goto end ;
767
887
768
888
/*
0 commit comments