You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

2321 lines
86KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
/**
 * rbsp_trailing_bits() syntax: one stop bit equal to 1, then zero bits
 * until the stream position reaches the next byte boundary.
 *
 * Works for both read and write through the fixed() macro, which
 * consumes/emits a bit and enforces the expected value.
 */
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, rbsp_stop_one_bit, 1);
    // Pad with zero bits up to byte alignment.
    while (byte_alignment(rw) != 0)
        fixed(1, rbsp_alignment_zero_bit, 0);

    return 0;
}
/**
 * nal_unit_header() syntax for H.265 NAL units.
 *
 * @param current                 Destination/source structure for the header fields.
 * @param expected_nal_unit_type  If non-negative, nal_unit_type is constrained
 *                                to exactly this value (min == max in the u()
 *                                range check); if negative, any 6-bit value is
 *                                accepted.
 */
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H265RawNALUnitHeader *current,
                                 int expected_nal_unit_type)
{
    int err;

    fixed(1, forbidden_zero_bit, 0);

    if (expected_nal_unit_type >= 0)
        u(6, nal_unit_type, expected_nal_unit_type,
                            expected_nal_unit_type);
    else
        ub(6, nal_unit_type);

    // nuh_layer_id 63 is reserved; temporal id is stored plus one.
    u(6, nuh_layer_id,          0, 62);
    u(3, nuh_temporal_id_plus1, 1,  7);

    return 0;
}
/**
 * byte_alignment() syntax: a single 1 bit followed by zero bits up to the
 * next byte boundary. Same shape as rbsp_trailing_bits() but with the
 * alignment-specific element names used for tracing.
 */
static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, alignment_bit_equal_to_one, 1);
    while (byte_alignment(rw) != 0)
        fixed(1, alignment_bit_equal_to_zero, 0);

    return 0;
}
/**
 * Generic extension_data: an arbitrary run of bits extending to the end of
 * the RBSP.
 *
 * Read path: first scan forward (skipping one bit at a time) to count how
 * many extension bits remain, then rewind to the saved position and read
 * them for real into a byte buffer, MSB-first within each byte.
 * Write path: emit current->bit_length bits from current->data.
 */
static int FUNC(extension_data)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawExtensionData *current)
{
    int err;
    size_t k;
#ifdef READ
    GetBitContext start;
    uint8_t bit;
    // Remember the position so the counting pass can be undone.
    start = *rw;
    for (k = 0; cbs_h2645_read_more_rbsp_data(rw); k++)
        skip_bits(rw, 1);
    current->bit_length = k;
    if (k > 0) {
        *rw = start;
        // Round the bit count up to whole bytes for the buffer.
        allocate(current->data, (current->bit_length + 7) / 8);
        for (k = 0; k < current->bit_length; k++) {
            xu(1, extension_data, bit, 0, 1, 0);
            current->data[k / 8] |= bit << (7 - k % 8);
        }
    }
#else
    for (k = 0; k < current->bit_length; k++)
        xu(1, extension_data, current->data[k / 8] >> (7 - k % 8) & 1, 0, 1, 0);
#endif
    return 0;
}
  75. static int FUNC(profile_tier_level)(CodedBitstreamContext *ctx, RWContext *rw,
  76. H265RawProfileTierLevel *current,
  77. int profile_present_flag,
  78. int max_num_sub_layers_minus1)
  79. {
  80. int err, i, j;
  81. if (profile_present_flag) {
  82. u(2, general_profile_space, 0, 0);
  83. flag(general_tier_flag);
  84. ub(5, general_profile_idc);
  85. for (j = 0; j < 32; j++)
  86. flags(general_profile_compatibility_flag[j], 1, j);
  87. flag(general_progressive_source_flag);
  88. flag(general_interlaced_source_flag);
  89. flag(general_non_packed_constraint_flag);
  90. flag(general_frame_only_constraint_flag);
  91. #define profile_compatible(x) (current->general_profile_idc == (x) || \
  92. current->general_profile_compatibility_flag[x])
  93. if (profile_compatible(4) || profile_compatible(5) ||
  94. profile_compatible(6) || profile_compatible(7) ||
  95. profile_compatible(8) || profile_compatible(9) ||
  96. profile_compatible(10)) {
  97. flag(general_max_12bit_constraint_flag);
  98. flag(general_max_10bit_constraint_flag);
  99. flag(general_max_8bit_constraint_flag);
  100. flag(general_max_422chroma_constraint_flag);
  101. flag(general_max_420chroma_constraint_flag);
  102. flag(general_max_monochrome_constraint_flag);
  103. flag(general_intra_constraint_flag);
  104. flag(general_one_picture_only_constraint_flag);
  105. flag(general_lower_bit_rate_constraint_flag);
  106. if (profile_compatible(5) || profile_compatible(9) ||
  107. profile_compatible(10)) {
  108. flag(general_max_14bit_constraint_flag);
  109. fixed(24, general_reserved_zero_33bits, 0);
  110. fixed( 9, general_reserved_zero_33bits, 0);
  111. } else {
  112. fixed(24, general_reserved_zero_34bits, 0);
  113. fixed(10, general_reserved_zero_34bits, 0);
  114. }
  115. } else if (profile_compatible(2)) {
  116. fixed(7, general_reserved_zero_7bits, 0);
  117. flag(general_one_picture_only_constraint_flag);
  118. fixed(24, general_reserved_zero_35bits, 0);
  119. fixed(11, general_reserved_zero_35bits, 0);
  120. } else {
  121. fixed(24, general_reserved_zero_43bits, 0);
  122. fixed(19, general_reserved_zero_43bits, 0);
  123. }
  124. if (profile_compatible(1) || profile_compatible(2) ||
  125. profile_compatible(3) || profile_compatible(4) ||
  126. profile_compatible(5) || profile_compatible(9)) {
  127. flag(general_inbld_flag);
  128. } else {
  129. fixed(1, general_reserved_zero_bit, 0);
  130. }
  131. #undef profile_compatible
  132. }
  133. ub(8, general_level_idc);
  134. for (i = 0; i < max_num_sub_layers_minus1; i++) {
  135. flags(sub_layer_profile_present_flag[i], 1, i);
  136. flags(sub_layer_level_present_flag[i], 1, i);
  137. }
  138. if (max_num_sub_layers_minus1 > 0) {
  139. for (i = max_num_sub_layers_minus1; i < 8; i++)
  140. fixed(2, reserved_zero_2bits, 0);
  141. }
  142. for (i = 0; i < max_num_sub_layers_minus1; i++) {
  143. if (current->sub_layer_profile_present_flag[i]) {
  144. us(2, sub_layer_profile_space[i], 0, 0, 1, i);
  145. flags(sub_layer_tier_flag[i], 1, i);
  146. ubs(5, sub_layer_profile_idc[i], 1, i);
  147. for (j = 0; j < 32; j++)
  148. flags(sub_layer_profile_compatibility_flag[i][j], 2, i, j);
  149. flags(sub_layer_progressive_source_flag[i], 1, i);
  150. flags(sub_layer_interlaced_source_flag[i], 1, i);
  151. flags(sub_layer_non_packed_constraint_flag[i], 1, i);
  152. flags(sub_layer_frame_only_constraint_flag[i], 1, i);
  153. #define profile_compatible(x) (current->sub_layer_profile_idc[i] == (x) || \
  154. current->sub_layer_profile_compatibility_flag[i][x])
  155. if (profile_compatible(4) || profile_compatible(5) ||
  156. profile_compatible(6) || profile_compatible(7) ||
  157. profile_compatible(8) || profile_compatible(9) ||
  158. profile_compatible(10)) {
  159. flags(sub_layer_max_12bit_constraint_flag[i], 1, i);
  160. flags(sub_layer_max_10bit_constraint_flag[i], 1, i);
  161. flags(sub_layer_max_8bit_constraint_flag[i], 1, i);
  162. flags(sub_layer_max_422chroma_constraint_flag[i], 1, i);
  163. flags(sub_layer_max_420chroma_constraint_flag[i], 1, i);
  164. flags(sub_layer_max_monochrome_constraint_flag[i], 1, i);
  165. flags(sub_layer_intra_constraint_flag[i], 1, i);
  166. flags(sub_layer_one_picture_only_constraint_flag[i], 1, i);
  167. flags(sub_layer_lower_bit_rate_constraint_flag[i], 1, i);
  168. if (profile_compatible(5)) {
  169. flags(sub_layer_max_14bit_constraint_flag[i], 1, i);
  170. fixed(24, sub_layer_reserved_zero_33bits, 0);
  171. fixed( 9, sub_layer_reserved_zero_33bits, 0);
  172. } else {
  173. fixed(24, sub_layer_reserved_zero_34bits, 0);
  174. fixed(10, sub_layer_reserved_zero_34bits, 0);
  175. }
  176. } else if (profile_compatible(2)) {
  177. fixed(7, sub_layer_reserved_zero_7bits, 0);
  178. flags(sub_layer_one_picture_only_constraint_flag[i], 1, i);
  179. fixed(24, sub_layer_reserved_zero_43bits, 0);
  180. fixed(11, sub_layer_reserved_zero_43bits, 0);
  181. } else {
  182. fixed(24, sub_layer_reserved_zero_43bits, 0);
  183. fixed(19, sub_layer_reserved_zero_43bits, 0);
  184. }
  185. if (profile_compatible(1) || profile_compatible(2) ||
  186. profile_compatible(3) || profile_compatible(4) ||
  187. profile_compatible(5) || profile_compatible(9)) {
  188. flags(sub_layer_inbld_flag[i], 1, i);
  189. } else {
  190. fixed(1, sub_layer_reserved_zero_bit, 0);
  191. }
  192. #undef profile_compatible
  193. }
  194. if (current->sub_layer_level_present_flag[i])
  195. ubs(8, sub_layer_level_idc[i], 1, i);
  196. }
  197. return 0;
  198. }
/**
 * sub_layer_hrd_parameters() syntax: per-CPB bit-rate/size values for one
 * sub-layer.
 *
 * @param hrd          Enclosing HRD parameter set; supplies cpb_cnt_minus1
 *                     and sub_pic_hrd_params_present_flag, and holds the
 *                     destination arrays.
 * @param nal          Nonzero selects hrd->nal_sub_layer_hrd_parameters,
 *                     zero selects hrd->vcl_sub_layer_hrd_parameters.
 * @param sub_layer_id Index of the sub-layer within those arrays.
 */
static int FUNC(sub_layer_hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H265RawHRDParameters *hrd,
                                          int nal, int sub_layer_id)
{
    H265RawSubLayerHRDParameters *current;
    int err, i;

    if (nal)
        current = &hrd->nal_sub_layer_hrd_parameters[sub_layer_id];
    else
        current = &hrd->vcl_sub_layer_hrd_parameters[sub_layer_id];

    // One entry per CPB (cpb_cnt_minus1 + 1 of them).
    for (i = 0; i <= hrd->cpb_cnt_minus1[sub_layer_id]; i++) {
        ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        // Decoding-unit values only exist with sub-picture HRD parameters.
        if (hrd->sub_pic_hrd_params_present_flag) {
            ues(cpb_size_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
            ues(bit_rate_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        }
        flags(cbr_flag[i], 1, i);
    }

    return 0;
}
/**
 * hrd_parameters() syntax (H.265 annex E): common HRD info followed by
 * per-sub-layer timing/CPB parameters.
 *
 * @param common_inf_present_flag   Nonzero if the common block (NAL/VCL
 *                                  presence flags, scales, delay lengths)
 *                                  is coded; otherwise defaults are inferred.
 * @param max_num_sub_layers_minus1 Upper bound of the sub-layer loop.
 */
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawHRDParameters *current, int common_inf_present_flag,
                                int max_num_sub_layers_minus1)
{
    int err, i;

    if (common_inf_present_flag) {
        flag(nal_hrd_parameters_present_flag);
        flag(vcl_hrd_parameters_present_flag);

        if (current->nal_hrd_parameters_present_flag ||
            current->vcl_hrd_parameters_present_flag) {
            flag(sub_pic_hrd_params_present_flag);
            if (current->sub_pic_hrd_params_present_flag) {
                ub(8, tick_divisor_minus2);
                ub(5, du_cpb_removal_delay_increment_length_minus1);
                flag(sub_pic_cpb_params_in_pic_timing_sei_flag);
                ub(5, dpb_output_delay_du_length_minus1);
            }

            ub(4, bit_rate_scale);
            ub(4, cpb_size_scale);
            if (current->sub_pic_hrd_params_present_flag)
                ub(4, cpb_size_du_scale);

            ub(5, initial_cpb_removal_delay_length_minus1);
            ub(5, au_cpb_removal_delay_length_minus1);
            ub(5, dpb_output_delay_length_minus1);
        } else {
            // Spec-mandated defaults when neither HRD variant is present.
            infer(sub_pic_hrd_params_present_flag, 0);
            infer(initial_cpb_removal_delay_length_minus1, 23);
            infer(au_cpb_removal_delay_length_minus1, 23);
            infer(dpb_output_delay_length_minus1, 23);
        }
    }

    for (i = 0; i <= max_num_sub_layers_minus1; i++) {
        flags(fixed_pic_rate_general_flag[i], 1, i);

        if (!current->fixed_pic_rate_general_flag[i])
            flags(fixed_pic_rate_within_cvs_flag[i], 1, i);
        else
            infer(fixed_pic_rate_within_cvs_flag[i], 1);

        if (current->fixed_pic_rate_within_cvs_flag[i]) {
            ues(elemental_duration_in_tc_minus1[i], 0, 2047, 1, i);
            infer(low_delay_hrd_flag[i], 0);
        } else
            flags(low_delay_hrd_flag[i], 1, i);

        if (!current->low_delay_hrd_flag[i])
            ues(cpb_cnt_minus1[i], 0, 31, 1, i);
        else
            infer(cpb_cnt_minus1[i], 0);

        // NOTE(review): the NAL branch passes nal = 0 and the VCL branch
        // nal = 1, while sub_layer_hrd_parameters() stores into the
        // nal_... array when nal is nonzero — the NAL/VCL storage labels
        // appear swapped. Round-trips are unaffected because read and
        // write use the same mapping; confirm against upstream before
        // relying on which struct array holds which parameters.
        if (current->nal_hrd_parameters_present_flag)
            CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 0, i));
        if (current->vcl_hrd_parameters_present_flag)
            CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 1, i));
    }

    return 0;
}
/**
 * vui_parameters() syntax (H.265 annex E): aspect ratio, colour description,
 * chroma location, display window, timing/HRD info and bitstream
 * restrictions. Absent optional fields get their spec-defined inferred
 * defaults so downstream code can read them unconditionally.
 *
 * @param sps Enclosing SPS; only sps_max_sub_layers_minus1 is used here
 *            (for the nested hrd_parameters() call).
 */
static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawVUI *current, const H265RawSPS *sps)
{
    int err;

    flag(aspect_ratio_info_present_flag);
    if (current->aspect_ratio_info_present_flag) {
        ub(8, aspect_ratio_idc);
        // 255 = Extended_SAR: explicit sample aspect ratio follows.
        if (current->aspect_ratio_idc == 255) {
            ub(16, sar_width);
            ub(16, sar_height);
        }
    } else {
        infer(aspect_ratio_idc, 0);
    }

    flag(overscan_info_present_flag);
    if (current->overscan_info_present_flag)
        flag(overscan_appropriate_flag);

    flag(video_signal_type_present_flag);
    if (current->video_signal_type_present_flag) {
        ub(3, video_format);
        flag(video_full_range_flag);
        flag(colour_description_present_flag);
        if (current->colour_description_present_flag) {
            ub(8, colour_primaries);
            ub(8, transfer_characteristics);
            ub(8, matrix_coefficients);
        } else {
            // 2 = "unspecified" for all three colour fields.
            infer(colour_primaries, 2);
            infer(transfer_characteristics, 2);
            infer(matrix_coefficients, 2);
        }
    } else {
        infer(video_format, 5);
        infer(video_full_range_flag, 0);
        infer(colour_primaries, 2);
        infer(transfer_characteristics, 2);
        infer(matrix_coefficients, 2);
    }

    flag(chroma_loc_info_present_flag);
    if (current->chroma_loc_info_present_flag) {
        ue(chroma_sample_loc_type_top_field, 0, 5);
        ue(chroma_sample_loc_type_bottom_field, 0, 5);
    } else {
        infer(chroma_sample_loc_type_top_field, 0);
        infer(chroma_sample_loc_type_bottom_field, 0);
    }

    flag(neutral_chroma_indication_flag);
    flag(field_seq_flag);
    flag(frame_field_info_present_flag);

    flag(default_display_window_flag);
    if (current->default_display_window_flag) {
        ue(def_disp_win_left_offset, 0, 16384);
        ue(def_disp_win_right_offset, 0, 16384);
        ue(def_disp_win_top_offset, 0, 16384);
        ue(def_disp_win_bottom_offset, 0, 16384);
    }

    flag(vui_timing_info_present_flag);
    if (current->vui_timing_info_present_flag) {
        u(32, vui_num_units_in_tick, 1, UINT32_MAX);
        u(32, vui_time_scale, 1, UINT32_MAX);
        flag(vui_poc_proportional_to_timing_flag);
        if (current->vui_poc_proportional_to_timing_flag)
            ue(vui_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1);
        flag(vui_hrd_parameters_present_flag);
        if (current->vui_hrd_parameters_present_flag) {
            CHECK(FUNC(hrd_parameters)(ctx, rw, &current->hrd_parameters,
                                       1, sps->sps_max_sub_layers_minus1));
        }
    }

    flag(bitstream_restriction_flag);
    if (current->bitstream_restriction_flag) {
        flag(tiles_fixed_structure_flag);
        flag(motion_vectors_over_pic_boundaries_flag);
        flag(restricted_ref_pic_lists_flag);
        ue(min_spatial_segmentation_idc, 0, 4095);
        ue(max_bytes_per_pic_denom, 0, 16);
        ue(max_bits_per_min_cu_denom, 0, 16);
        ue(log2_max_mv_length_horizontal, 0, 16);
        ue(log2_max_mv_length_vertical, 0, 16);
    } else {
        infer(tiles_fixed_structure_flag, 0);
        infer(motion_vectors_over_pic_boundaries_flag, 1);
        infer(min_spatial_segmentation_idc, 0);
        infer(max_bytes_per_pic_denom, 2);
        infer(max_bits_per_min_cu_denom, 1);
        infer(log2_max_mv_length_horizontal, 15);
        infer(log2_max_mv_length_vertical, 15);
    }

    return 0;
}
/**
 * Video Parameter Set (H.265 section 7.3.2.1): NAL header, profile/tier/
 * level, per-sub-layer DPB ordering info, layer sets, optional timing and
 * HRD parameters, and optional extension data; ends with RBSP trailing bits.
 */
static int FUNC(vps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawVPS *current)
{
    int err, i, j;

    HEADER("Video Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_VPS));

    ub(4, vps_video_parameter_set_id);
    flag(vps_base_layer_internal_flag);
    flag(vps_base_layer_available_flag);
    u(6, vps_max_layers_minus1,     0, HEVC_MAX_LAYERS - 1);
    u(3, vps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1);
    flag(vps_temporal_id_nesting_flag);

    // Spec constraint: with a single sub-layer, nesting must be signalled.
    if (current->vps_max_sub_layers_minus1 == 0 &&
        current->vps_temporal_id_nesting_flag != 1) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
               "vps_temporal_id_nesting_flag must be 1 if "
               "vps_max_sub_layers_minus1 is 0.\n");
        return AVERROR_INVALIDDATA;
    }

    fixed(16, vps_reserved_0xffff_16bits, 0xffff);

    CHECK(FUNC(profile_tier_level)(ctx, rw, &current->profile_tier_level,
                                   1, current->vps_max_sub_layers_minus1));

    flag(vps_sub_layer_ordering_info_present_flag);
    // When the per-sub-layer info is absent, only the top entry is coded.
    for (i = (current->vps_sub_layer_ordering_info_present_flag ?
              0 : current->vps_max_sub_layers_minus1);
         i <= current->vps_max_sub_layers_minus1; i++) {
        ues(vps_max_dec_pic_buffering_minus1[i],
            0, HEVC_MAX_DPB_SIZE - 1, 1, i);
        ues(vps_max_num_reorder_pics[i],
            0, current->vps_max_dec_pic_buffering_minus1[i], 1, i);
        ues(vps_max_latency_increase_plus1[i],
            0, UINT32_MAX - 1, 1, i);
    }
    // Lower sub-layers inherit the values of the highest sub-layer.
    if (!current->vps_sub_layer_ordering_info_present_flag) {
        for (i = 0; i < current->vps_max_sub_layers_minus1; i++) {
            infer(vps_max_dec_pic_buffering_minus1[i],
                  current->vps_max_dec_pic_buffering_minus1[current->vps_max_sub_layers_minus1]);
            infer(vps_max_num_reorder_pics[i],
                  current->vps_max_num_reorder_pics[current->vps_max_sub_layers_minus1]);
            infer(vps_max_latency_increase_plus1[i],
                  current->vps_max_latency_increase_plus1[current->vps_max_sub_layers_minus1]);
        }
    }

    u(6, vps_max_layer_id,        0, HEVC_MAX_LAYERS - 1);
    ue(vps_num_layer_sets_minus1, 0, HEVC_MAX_LAYER_SETS - 1);
    for (i = 1; i <= current->vps_num_layer_sets_minus1; i++) {
        for (j = 0; j <= current->vps_max_layer_id; j++)
            flags(layer_id_included_flag[i][j], 2, i, j);
    }
    // Layer set 0 implicitly contains only layer 0.
    for (j = 0; j <= current->vps_max_layer_id; j++)
        infer(layer_id_included_flag[0][j], j == 0);

    flag(vps_timing_info_present_flag);
    if (current->vps_timing_info_present_flag) {
        u(32, vps_num_units_in_tick, 1, UINT32_MAX);
        u(32, vps_time_scale,        1, UINT32_MAX);
        flag(vps_poc_proportional_to_timing_flag);
        if (current->vps_poc_proportional_to_timing_flag)
            ue(vps_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1);
        ue(vps_num_hrd_parameters, 0, current->vps_num_layer_sets_minus1 + 1);
        for (i = 0; i < current->vps_num_hrd_parameters; i++) {
            // Index 0 is only valid when the base layer is internal.
            ues(hrd_layer_set_idx[i],
                current->vps_base_layer_internal_flag ? 0 : 1,
                current->vps_num_layer_sets_minus1, 1, i);
            if (i > 0)
                flags(cprms_present_flag[i], 1, i);
            else
                infer(cprms_present_flag[0], 1);
            CHECK(FUNC(hrd_parameters)(ctx, rw, &current->hrd_parameters[i],
                                       current->cprms_present_flag[i],
                                       current->vps_max_sub_layers_minus1));
        }
    }

    flag(vps_extension_flag);
    if (current->vps_extension_flag)
        CHECK(FUNC(extension_data)(ctx, rw, &current->extension_data));
    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/**
 * st_ref_pic_set() syntax (H.265 section 7.3.7) with the section 7.4.8
 * inter-RPS prediction process applied so that the stored representation
 * is always the explicit delta-step form, regardless of whether the set
 * was coded predictively.
 *
 * @param st_rps_idx Index of this set; equal to sps->num_short_term_ref_pic_sets
 *                   when the set is coded in a slice header rather than the SPS.
 * @param sps        SPS supplying previously decoded reference sets.
 */
static int FUNC(st_ref_pic_set)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawSTRefPicSet *current, int st_rps_idx,
                                const H265RawSPS *sps)
{
    int err, i, j;

    // The first set in the SPS cannot be predicted from anything.
    if (st_rps_idx != 0)
        flag(inter_ref_pic_set_prediction_flag);
    else
        infer(inter_ref_pic_set_prediction_flag, 0);

    if (current->inter_ref_pic_set_prediction_flag) {
        unsigned int ref_rps_idx, num_delta_pocs, num_ref_pics;
        const H265RawSTRefPicSet *ref;
        int delta_rps, d_poc;
        int ref_delta_poc_s0[HEVC_MAX_REFS], ref_delta_poc_s1[HEVC_MAX_REFS];
        int delta_poc_s0[HEVC_MAX_REFS], delta_poc_s1[HEVC_MAX_REFS];
        uint8_t used_by_curr_pic_s0[HEVC_MAX_REFS],
                used_by_curr_pic_s1[HEVC_MAX_REFS];

        // delta_idx is only coded for slice-header sets; SPS sets always
        // predict from the immediately preceding set.
        if (st_rps_idx == sps->num_short_term_ref_pic_sets)
            ue(delta_idx_minus1, 0, st_rps_idx - 1);
        else
            infer(delta_idx_minus1, 0);

        ref_rps_idx = st_rps_idx - (current->delta_idx_minus1 + 1);
        ref = &sps->st_ref_pic_set[ref_rps_idx];
        num_delta_pocs = ref->num_negative_pics + ref->num_positive_pics;
        av_assert0(num_delta_pocs < HEVC_MAX_DPB_SIZE);

        flag(delta_rps_sign);
        ue(abs_delta_rps_minus1, 0, INT16_MAX);
        delta_rps = (1 - 2 * current->delta_rps_sign) *
            (current->abs_delta_rps_minus1 + 1);

        // One usage/delta flag pair per reference picture, plus one for
        // the reference set itself (hence <=).
        num_ref_pics = 0;
        for (j = 0; j <= num_delta_pocs; j++) {
            flags(used_by_curr_pic_flag[j], 1, j);
            if (!current->used_by_curr_pic_flag[j])
                flags(use_delta_flag[j], 1, j);
            else
                infer(use_delta_flag[j], 1);
            if (current->use_delta_flag[j])
                ++num_ref_pics;
        }
        if (num_ref_pics >= HEVC_MAX_DPB_SIZE) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
                   "short-term ref pic set %d "
                   "contains too many pictures.\n", st_rps_idx);
            return AVERROR_INVALIDDATA;
        }

        // Since the stored form of an RPS here is actually the delta-step
        // form used when inter_ref_pic_set_prediction_flag is not set, we
        // need to reconstruct that here in order to be able to refer to
        // the RPS later (which is required for parsing, because we don't
        // even know what syntax elements appear without it).  Therefore,
        // this code takes the delta-step form of the reference set, turns
        // it into the delta-array form, applies the prediction process of
        // 7.4.8, converts the result back to the delta-step form, and
        // stores that as the current set for future use.  Note that the
        // inferences here mean that writers using prediction will need
        // to fill in the delta-step values correctly as well - since the
        // whole RPS prediction process is somewhat overly sophisticated,
        // this hopefully forms a useful check for them to ensure their
        // predicted form actually matches what was intended rather than
        // an onerous additional requirement.

        // Expand the reference set from delta-step form to absolute POC
        // deltas (negative for S0, positive for S1).
        d_poc = 0;
        for (i = 0; i < ref->num_negative_pics; i++) {
            d_poc -= ref->delta_poc_s0_minus1[i] + 1;
            ref_delta_poc_s0[i] = d_poc;
        }
        d_poc = 0;
        for (i = 0; i < ref->num_positive_pics; i++) {
            d_poc += ref->delta_poc_s1_minus1[i] + 1;
            ref_delta_poc_s1[i] = d_poc;
        }

        // Build the predicted S0 (negative) list, in decreasing-POC order:
        // shifted S1 entries that became negative, the reference picture
        // itself if delta_rps < 0, then the shifted S0 entries.
        i = 0;
        for (j = ref->num_positive_pics - 1; j >= 0; j--) {
            d_poc = ref_delta_poc_s1[j] + delta_rps;
            if (d_poc < 0 && current->use_delta_flag[ref->num_negative_pics + j]) {
                delta_poc_s0[i] = d_poc;
                used_by_curr_pic_s0[i++] =
                    current->used_by_curr_pic_flag[ref->num_negative_pics + j];
            }
        }
        if (delta_rps < 0 && current->use_delta_flag[num_delta_pocs]) {
            delta_poc_s0[i] = delta_rps;
            used_by_curr_pic_s0[i++] =
                current->used_by_curr_pic_flag[num_delta_pocs];
        }
        for (j = 0; j < ref->num_negative_pics; j++) {
            d_poc = ref_delta_poc_s0[j] + delta_rps;
            if (d_poc < 0 && current->use_delta_flag[j]) {
                delta_poc_s0[i] = d_poc;
                used_by_curr_pic_s0[i++] = current->used_by_curr_pic_flag[j];
            }
        }

        // Convert S0 back to delta-step form and store it.
        infer(num_negative_pics, i);
        for (i = 0; i < current->num_negative_pics; i++) {
            infer(delta_poc_s0_minus1[i],
                  -(delta_poc_s0[i] - (i == 0 ? 0 : delta_poc_s0[i - 1])) - 1);
            infer(used_by_curr_pic_s0_flag[i], used_by_curr_pic_s0[i]);
        }

        // Mirror of the S0 construction for the positive (S1) list.
        i = 0;
        for (j = ref->num_negative_pics - 1; j >= 0; j--) {
            d_poc = ref_delta_poc_s0[j] + delta_rps;
            if (d_poc > 0 && current->use_delta_flag[j]) {
                delta_poc_s1[i] = d_poc;
                used_by_curr_pic_s1[i++] = current->used_by_curr_pic_flag[j];
            }
        }
        if (delta_rps > 0 && current->use_delta_flag[num_delta_pocs]) {
            delta_poc_s1[i] = delta_rps;
            used_by_curr_pic_s1[i++] =
                current->used_by_curr_pic_flag[num_delta_pocs];
        }
        for (j = 0; j < ref->num_positive_pics; j++) {
            d_poc = ref_delta_poc_s1[j] + delta_rps;
            if (d_poc > 0 && current->use_delta_flag[ref->num_negative_pics + j]) {
                delta_poc_s1[i] = d_poc;
                used_by_curr_pic_s1[i++] =
                    current->used_by_curr_pic_flag[ref->num_negative_pics + j];
            }
        }

        infer(num_positive_pics, i);
        for (i = 0; i < current->num_positive_pics; i++) {
            infer(delta_poc_s1_minus1[i],
                  delta_poc_s1[i] - (i == 0 ? 0 : delta_poc_s1[i - 1]) - 1);
            infer(used_by_curr_pic_s1_flag[i], used_by_curr_pic_s1[i]);
        }
    } else {
        // Explicit (non-predicted) coding: plain delta-step lists.
        ue(num_negative_pics, 0, 15);
        ue(num_positive_pics, 0, 15 - current->num_negative_pics);
        for (i = 0; i < current->num_negative_pics; i++) {
            ues(delta_poc_s0_minus1[i], 0, INT16_MAX, 1, i);
            flags(used_by_curr_pic_s0_flag[i], 1, i);
        }
        for (i = 0; i < current->num_positive_pics; i++) {
            ues(delta_poc_s1_minus1[i], 0, INT16_MAX, 1, i);
            flags(used_by_curr_pic_s1_flag[i], 1, i);
        }
    }

    return 0;
}
/**
 * scaling_list_data() syntax (H.265 section 7.3.4): per size/matrix either
 * a reference to a previously coded matrix (prediction) or an explicit
 * list of delta coefficients (plus a DC coefficient for the larger sizes).
 */
static int FUNC(scaling_list_data)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawScalingList *current)
{
    int sizeId, matrixId;
    int err, n, i;

    for (sizeId = 0; sizeId < 4; sizeId++) {
        // Size 3 (32x32) only has matrices 0 and 3, hence the stride.
        for (matrixId = 0; matrixId < 6; matrixId += (sizeId == 3 ? 3 : 1)) {
            flags(scaling_list_pred_mode_flag[sizeId][matrixId],
                  2, sizeId, matrixId);
            if (!current->scaling_list_pred_mode_flag[sizeId][matrixId]) {
                // Predicted: delta back to the source matrix id.
                ues(scaling_list_pred_matrix_id_delta[sizeId][matrixId],
                    0, sizeId == 3 ? matrixId / 3 : matrixId,
                    2, sizeId, matrixId);
            } else {
                // Explicit: coefficient count capped at 64 entries.
                n = FFMIN(64, 1 << (4 + (sizeId << 1)));
                // 16x16 and 32x32 matrices carry a separate DC coefficient.
                if (sizeId > 1) {
                    ses(scaling_list_dc_coef_minus8[sizeId - 2][matrixId], -7, +247,
                        2, sizeId - 2, matrixId);
                }
                for (i = 0; i < n; i++) {
                    ses(scaling_list_delta_coeff[sizeId][matrixId][i],
                        -128, +127, 3, sizeId, matrixId, i);
                }
            }
        }
    }

    return 0;
}
/**
 * sps_range_extension() syntax: the nine single-bit tool flags added by the
 * format range extensions (RExt), read/written in bitstream order.
 */
static int FUNC(sps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H265RawSPS *current)
{
    int err;

    flag(transform_skip_rotation_enabled_flag);
    flag(transform_skip_context_enabled_flag);
    flag(implicit_rdpcm_enabled_flag);
    flag(explicit_rdpcm_enabled_flag);
    flag(extended_precision_processing_flag);
    flag(intra_smoothing_disabled_flag);
    flag(high_precision_offsets_enabled_flag);
    flag(persistent_rice_adaptation_enabled_flag);
    flag(cabac_bypass_alignment_enabled_flag);

    return 0;
}
/**
 * sps_scc_extension() syntax: screen content coding tools — current-picture
 * referencing, palette mode (with optional predictor initializers) and
 * motion vector resolution control.
 */
static int FUNC(sps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawSPS *current)
{
    int err, comp, i;

    flag(sps_curr_pic_ref_enabled_flag);

    flag(palette_mode_enabled_flag);
    if (current->palette_mode_enabled_flag) {
        ue(palette_max_size, 0, 64);
        ue(delta_palette_max_predictor_size, 0, 128);

        flag(sps_palette_predictor_initializer_present_flag);
        if (current->sps_palette_predictor_initializer_present_flag) {
            ue(sps_num_palette_predictor_initializer_minus1, 0, 128);
            // One component for monochrome, three otherwise; each entry is
            // coded at the component's full sample bit depth.
            for (comp = 0; comp < (current->chroma_format_idc ? 3 : 1); comp++) {
                int bit_depth = comp == 0 ? current->bit_depth_luma_minus8 + 8
                                          : current->bit_depth_chroma_minus8 + 8;
                for (i = 0; i <= current->sps_num_palette_predictor_initializer_minus1; i++)
                    ubs(bit_depth, sps_palette_predictor_initializers[comp][i], 2, comp, i);
            }
        }
    }

    u(2, motion_vector_resolution_control_idc, 0, 2);
    flag(intra_boundary_filtering_disable_flag);

    return 0;
}
/**
 * Fill a VUI structure with the inferred defaults used when the SPS carries
 * no vui_parameters() at all. The values mirror the infer() defaults in
 * vui_parameters(); rw and sps are unused (kept for a uniform signature).
 */
static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx,
                                        RWContext *rw, H265RawVUI *current,
                                        H265RawSPS *sps)
{
    infer(aspect_ratio_idc, 0);

    infer(video_format,             5);
    infer(video_full_range_flag,    0);
    infer(colour_primaries,         2);
    infer(transfer_characteristics, 2);
    infer(matrix_coefficients,      2);

    infer(chroma_sample_loc_type_top_field,    0);
    infer(chroma_sample_loc_type_bottom_field, 0);

    infer(tiles_fixed_structure_flag,    0);
    infer(motion_vectors_over_pic_boundaries_flag, 1);
    infer(min_spatial_segmentation_idc,  0);
    infer(max_bytes_per_pic_denom,       2);
    infer(max_bits_per_min_cu_denom,     1);
    infer(log2_max_mv_length_horizontal, 15);
    infer(log2_max_mv_length_vertical,   15);

    return 0;
}
  667. static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
  668. H265RawSPS *current)
  669. {
  670. CodedBitstreamH265Context *h265 = ctx->priv_data;
  671. const H265RawVPS *vps;
  672. int err, i;
  673. unsigned int min_cb_log2_size_y, ctb_log2_size_y,
  674. min_cb_size_y, min_tb_log2_size_y;
  675. HEADER("Sequence Parameter Set");
  676. CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_SPS));
  677. ub(4, sps_video_parameter_set_id);
  678. h265->active_vps = vps = h265->vps[current->sps_video_parameter_set_id];
  679. u(3, sps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1);
  680. flag(sps_temporal_id_nesting_flag);
  681. if (vps) {
  682. if (vps->vps_max_sub_layers_minus1 > current->sps_max_sub_layers_minus1) {
  683. av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
  684. "sps_max_sub_layers_minus1 (%d) must be less than or equal to "
  685. "vps_max_sub_layers_minus1 (%d).\n",
  686. vps->vps_max_sub_layers_minus1,
  687. current->sps_max_sub_layers_minus1);
  688. return AVERROR_INVALIDDATA;
  689. }
  690. if (vps->vps_temporal_id_nesting_flag &&
  691. !current->sps_temporal_id_nesting_flag) {
  692. av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
  693. "sps_temporal_id_nesting_flag must be 1 if "
  694. "vps_temporal_id_nesting_flag is 1.\n");
  695. return AVERROR_INVALIDDATA;
  696. }
  697. }
  698. CHECK(FUNC(profile_tier_level)(ctx, rw, &current->profile_tier_level,
  699. 1, current->sps_max_sub_layers_minus1));
  700. ue(sps_seq_parameter_set_id, 0, 15);
  701. ue(chroma_format_idc, 0, 3);
  702. if (current->chroma_format_idc == 3)
  703. flag(separate_colour_plane_flag);
  704. else
  705. infer(separate_colour_plane_flag, 0);
  706. ue(pic_width_in_luma_samples, 1, HEVC_MAX_WIDTH);
  707. ue(pic_height_in_luma_samples, 1, HEVC_MAX_HEIGHT);
  708. flag(conformance_window_flag);
  709. if (current->conformance_window_flag) {
  710. ue(conf_win_left_offset, 0, current->pic_width_in_luma_samples);
  711. ue(conf_win_right_offset, 0, current->pic_width_in_luma_samples);
  712. ue(conf_win_top_offset, 0, current->pic_height_in_luma_samples);
  713. ue(conf_win_bottom_offset, 0, current->pic_height_in_luma_samples);
  714. } else {
  715. infer(conf_win_left_offset, 0);
  716. infer(conf_win_right_offset, 0);
  717. infer(conf_win_top_offset, 0);
  718. infer(conf_win_bottom_offset, 0);
  719. }
  720. ue(bit_depth_luma_minus8, 0, 8);
  721. ue(bit_depth_chroma_minus8, 0, 8);
  722. ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
  723. flag(sps_sub_layer_ordering_info_present_flag);
  724. for (i = (current->sps_sub_layer_ordering_info_present_flag ?
  725. 0 : current->sps_max_sub_layers_minus1);
  726. i <= current->sps_max_sub_layers_minus1; i++) {
  727. ues(sps_max_dec_pic_buffering_minus1[i],
  728. 0, HEVC_MAX_DPB_SIZE - 1, 1, i);
  729. ues(sps_max_num_reorder_pics[i],
  730. 0, current->sps_max_dec_pic_buffering_minus1[i], 1, i);
  731. ues(sps_max_latency_increase_plus1[i],
  732. 0, UINT32_MAX - 1, 1, i);
  733. }
  734. if (!current->sps_sub_layer_ordering_info_present_flag) {
  735. for (i = 0; i < current->sps_max_sub_layers_minus1; i++) {
  736. infer(sps_max_dec_pic_buffering_minus1[i],
  737. current->sps_max_dec_pic_buffering_minus1[current->sps_max_sub_layers_minus1]);
  738. infer(sps_max_num_reorder_pics[i],
  739. current->sps_max_num_reorder_pics[current->sps_max_sub_layers_minus1]);
  740. infer(sps_max_latency_increase_plus1[i],
  741. current->sps_max_latency_increase_plus1[current->sps_max_sub_layers_minus1]);
  742. }
  743. }
  744. ue(log2_min_luma_coding_block_size_minus3, 0, 3);
  745. min_cb_log2_size_y = current->log2_min_luma_coding_block_size_minus3 + 3;
  746. ue(log2_diff_max_min_luma_coding_block_size, 0, 3);
  747. ctb_log2_size_y = min_cb_log2_size_y +
  748. current->log2_diff_max_min_luma_coding_block_size;
  749. min_cb_size_y = 1 << min_cb_log2_size_y;
  750. if (current->pic_width_in_luma_samples % min_cb_size_y ||
  751. current->pic_height_in_luma_samples % min_cb_size_y) {
  752. av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid dimensions: %ux%u not divisible "
  753. "by MinCbSizeY = %u.\n", current->pic_width_in_luma_samples,
  754. current->pic_height_in_luma_samples, min_cb_size_y);
  755. return AVERROR_INVALIDDATA;
  756. }
  757. ue(log2_min_luma_transform_block_size_minus2, 0, min_cb_log2_size_y - 3);
  758. min_tb_log2_size_y = current->log2_min_luma_transform_block_size_minus2 + 2;
  759. ue(log2_diff_max_min_luma_transform_block_size,
  760. 0, FFMIN(ctb_log2_size_y, 5) - min_tb_log2_size_y);
  761. ue(max_transform_hierarchy_depth_inter,
  762. 0, ctb_log2_size_y - min_tb_log2_size_y);
  763. ue(max_transform_hierarchy_depth_intra,
  764. 0, ctb_log2_size_y - min_tb_log2_size_y);
  765. flag(scaling_list_enabled_flag);
  766. if (current->scaling_list_enabled_flag) {
  767. flag(sps_scaling_list_data_present_flag);
  768. if (current->sps_scaling_list_data_present_flag)
  769. CHECK(FUNC(scaling_list_data)(ctx, rw, &current->scaling_list));
  770. } else {
  771. infer(sps_scaling_list_data_present_flag, 0);
  772. }
  773. flag(amp_enabled_flag);
  774. flag(sample_adaptive_offset_enabled_flag);
  775. flag(pcm_enabled_flag);
  776. if (current->pcm_enabled_flag) {
  777. u(4, pcm_sample_bit_depth_luma_minus1,
  778. 0, current->bit_depth_luma_minus8 + 8 - 1);
  779. u(4, pcm_sample_bit_depth_chroma_minus1,
  780. 0, current->bit_depth_chroma_minus8 + 8 - 1);
  781. ue(log2_min_pcm_luma_coding_block_size_minus3,
  782. FFMIN(min_cb_log2_size_y, 5) - 3, FFMIN(ctb_log2_size_y, 5) - 3);
  783. ue(log2_diff_max_min_pcm_luma_coding_block_size,
  784. 0, FFMIN(ctb_log2_size_y, 5) - (current->log2_min_pcm_luma_coding_block_size_minus3 + 3));
  785. flag(pcm_loop_filter_disabled_flag);
  786. }
  787. ue(num_short_term_ref_pic_sets, 0, HEVC_MAX_SHORT_TERM_REF_PIC_SETS);
  788. for (i = 0; i < current->num_short_term_ref_pic_sets; i++)
  789. CHECK(FUNC(st_ref_pic_set)(ctx, rw, &current->st_ref_pic_set[i], i, current));
  790. flag(long_term_ref_pics_present_flag);
  791. if (current->long_term_ref_pics_present_flag) {
  792. ue(num_long_term_ref_pics_sps, 0, HEVC_MAX_LONG_TERM_REF_PICS);
  793. for (i = 0; i < current->num_long_term_ref_pics_sps; i++) {
  794. ubs(current->log2_max_pic_order_cnt_lsb_minus4 + 4,
  795. lt_ref_pic_poc_lsb_sps[i], 1, i);
  796. flags(used_by_curr_pic_lt_sps_flag[i], 1, i);
  797. }
  798. }
  799. flag(sps_temporal_mvp_enabled_flag);
  800. flag(strong_intra_smoothing_enabled_flag);
  801. flag(vui_parameters_present_flag);
  802. if (current->vui_parameters_present_flag)
  803. CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));
  804. else
  805. CHECK(FUNC(vui_parameters_default)(ctx, rw, &current->vui, current));
  806. flag(sps_extension_present_flag);
  807. if (current->sps_extension_present_flag) {
  808. flag(sps_range_extension_flag);
  809. flag(sps_multilayer_extension_flag);
  810. flag(sps_3d_extension_flag);
  811. flag(sps_scc_extension_flag);
  812. ub(4, sps_extension_4bits);
  813. }
  814. if (current->sps_range_extension_flag)
  815. CHECK(FUNC(sps_range_extension)(ctx, rw, current));
  816. if (current->sps_multilayer_extension_flag)
  817. return AVERROR_PATCHWELCOME;
  818. if (current->sps_3d_extension_flag)
  819. return AVERROR_PATCHWELCOME;
  820. if (current->sps_scc_extension_flag)
  821. CHECK(FUNC(sps_scc_extension)(ctx, rw, current));
  822. if (current->sps_extension_4bits)
  823. CHECK(FUNC(extension_data)(ctx, rw, &current->extension_data));
  824. CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));
  825. return 0;
  826. }
/*
 * PPS range extension syntax (pps_range_extension(), H.265 section 7.3.2.3.2).
 * Read/written only when pps_range_extension_flag is set in the PPS.
 * Requires an active SPS (set by FUNC(pps) before this is called) for the
 * coding-block-size and bit-depth bounds used below.
 */
static int FUNC(pps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H265RawPPS *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps = h265->active_sps;
    int err, i;

    // Present only when transform skip was enabled in the main PPS body.
    if (current->transform_skip_enabled_flag)
        ue(log2_max_transform_skip_block_size_minus2, 0, 3);

    flag(cross_component_prediction_enabled_flag);

    flag(chroma_qp_offset_list_enabled_flag);
    if (current->chroma_qp_offset_list_enabled_flag) {
        // Depth is bounded by the CTB/min-CB size difference from the SPS.
        ue(diff_cu_chroma_qp_offset_depth,
           0, sps->log2_diff_max_min_luma_coding_block_size);
        ue(chroma_qp_offset_list_len_minus1, 0, 5);
        for (i = 0; i <= current->chroma_qp_offset_list_len_minus1; i++) {
            ses(cb_qp_offset_list[i], -12, +12, 1, i);
            ses(cr_qp_offset_list[i], -12, +12, 1, i);
        }
    }

    // SAO offset scales: upper bound is bit_depth - 10, clamped to zero,
    // so these are forced to 0 for bit depths up to 10 bits.
    ue(log2_sao_offset_scale_luma, 0, FFMAX(0, sps->bit_depth_luma_minus8 - 2));
    ue(log2_sao_offset_scale_chroma, 0, FFMAX(0, sps->bit_depth_chroma_minus8 - 2));

    return 0;
}
/*
 * PPS screen content coding extension syntax (pps_scc_extension(),
 * H.265 section 7.3.2.3.3). Read/written only when pps_scc_extension_flag
 * is set. Covers current-picture referencing, adaptive colour transform
 * QP offsets, and the palette predictor initializers.
 */
static int FUNC(pps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawPPS *current)
{
    int err, comp, i;

    flag(pps_curr_pic_ref_enabled_flag);

    flag(residual_adaptive_colour_transform_enabled_flag);
    if (current->residual_adaptive_colour_transform_enabled_flag) {
        flag(pps_slice_act_qp_offsets_present_flag);
        // ACT QP offsets are coded with +5/+5/+3 biases.
        se(pps_act_y_qp_offset_plus5, -7, +17);
        se(pps_act_cb_qp_offset_plus5, -7, +17);
        se(pps_act_cr_qp_offset_plus3, -9, +15);
    } else {
        infer(pps_slice_act_qp_offsets_present_flag, 0);
        infer(pps_act_y_qp_offset_plus5, 0);
        infer(pps_act_cb_qp_offset_plus5, 0);
        infer(pps_act_cr_qp_offset_plus3, 0);
    }

    flag(pps_palette_predictor_initializer_present_flag);
    if (current->pps_palette_predictor_initializer_present_flag) {
        ue(pps_num_palette_predictor_initializer, 0, 128);
        if (current->pps_num_palette_predictor_initializer > 0) {
            flag(monochrome_palette_flag);
            ue(luma_bit_depth_entry_minus8, 0, 8);
            // Chroma entry bit depth is only coded for non-monochrome palettes.
            if (!current->monochrome_palette_flag)
                ue(chroma_bit_depth_entry_minus8, 0, 8);
            // One component for monochrome, three (Y/Cb/Cr) otherwise; each
            // initializer entry is coded at that component's bit depth.
            for (comp = 0; comp < (current->monochrome_palette_flag ? 1 : 3); comp++) {
                int bit_depth = comp == 0 ? current->luma_bit_depth_entry_minus8 + 8
                                          : current->chroma_bit_depth_entry_minus8 + 8;
                for (i = 0; i < current->pps_num_palette_predictor_initializer; i++)
                    ubs(bit_depth, pps_palette_predictor_initializers[comp][i], 2, comp, i);
            }
        }
    }

    return 0;
}
/*
 * Picture parameter set syntax (pic_parameter_set_rbsp(), H.265 section
 * 7.3.2.3.1). Resolves and activates the referenced SPS (needed for several
 * range bounds below), then reads/writes the PPS body, the extension flags
 * and any supported extensions, and the RBSP trailing bits.
 *
 * Returns 0 on success, AVERROR_INVALIDDATA if the referenced SPS is not
 * available, or AVERROR_PATCHWELCOME for unsupported extensions.
 */
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawPPS *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps;
    int err, i;

    HEADER("Picture Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_PPS));

    ue(pps_pic_parameter_set_id, 0, 63);
    ue(pps_seq_parameter_set_id, 0, 15);
    // The PPS can only be parsed against its SPS, so the SPS must already
    // have been seen and stored.
    sps = h265->sps[current->pps_seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->pps_seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_sps = sps;

    flag(dependent_slice_segments_enabled_flag);
    flag(output_flag_present_flag);
    ub(3, num_extra_slice_header_bits);
    flag(sign_data_hiding_enabled_flag);
    flag(cabac_init_present_flag);

    ue(num_ref_idx_l0_default_active_minus1, 0, 14);
    ue(num_ref_idx_l1_default_active_minus1, 0, 14);

    // Lower bound keeps init QP = 26 + init_qp_minus26 within the valid
    // QP range, which extends downwards with higher luma bit depth.
    se(init_qp_minus26, -(26 + 6 * sps->bit_depth_luma_minus8), +25);

    flag(constrained_intra_pred_flag);
    flag(transform_skip_enabled_flag);

    flag(cu_qp_delta_enabled_flag);
    if (current->cu_qp_delta_enabled_flag)
        ue(diff_cu_qp_delta_depth,
           0, sps->log2_diff_max_min_luma_coding_block_size);
    else
        infer(diff_cu_qp_delta_depth, 0);

    se(pps_cb_qp_offset, -12, +12);
    se(pps_cr_qp_offset, -12, +12);
    flag(pps_slice_chroma_qp_offsets_present_flag);

    flag(weighted_pred_flag);
    flag(weighted_bipred_flag);

    flag(transquant_bypass_enabled_flag);
    flag(tiles_enabled_flag);
    flag(entropy_coding_sync_enabled_flag);

    if (current->tiles_enabled_flag) {
        ue(num_tile_columns_minus1, 0, HEVC_MAX_TILE_COLUMNS);
        ue(num_tile_rows_minus1, 0, HEVC_MAX_TILE_ROWS);
        flag(uniform_spacing_flag);
        // Explicit column widths / row heights are only coded for
        // non-uniform tile spacing; the last column/row is implicit.
        if (!current->uniform_spacing_flag) {
            for (i = 0; i < current->num_tile_columns_minus1; i++)
                ues(column_width_minus1[i], 0, sps->pic_width_in_luma_samples, 1, i);
            for (i = 0; i < current->num_tile_rows_minus1; i++)
                ues(row_height_minus1[i], 0, sps->pic_height_in_luma_samples, 1, i);
        }
        flag(loop_filter_across_tiles_enabled_flag);
    } else {
        infer(num_tile_columns_minus1, 0);
        infer(num_tile_rows_minus1, 0);
    }

    flag(pps_loop_filter_across_slices_enabled_flag);

    flag(deblocking_filter_control_present_flag);
    if (current->deblocking_filter_control_present_flag) {
        flag(deblocking_filter_override_enabled_flag);
        flag(pps_deblocking_filter_disabled_flag);
        if (!current->pps_deblocking_filter_disabled_flag) {
            se(pps_beta_offset_div2, -6, +6);
            se(pps_tc_offset_div2, -6, +6);
        } else {
            infer(pps_beta_offset_div2, 0);
            infer(pps_tc_offset_div2, 0);
        }
    } else {
        infer(deblocking_filter_override_enabled_flag, 0);
        infer(pps_deblocking_filter_disabled_flag, 0);
        infer(pps_beta_offset_div2, 0);
        infer(pps_tc_offset_div2, 0);
    }

    flag(pps_scaling_list_data_present_flag);
    if (current->pps_scaling_list_data_present_flag)
        CHECK(FUNC(scaling_list_data)(ctx, rw, &current->scaling_list));

    flag(lists_modification_present_flag);

    // Upper bound is CtbLog2SizeY - 2 (spec: 0 <= v <= CtbLog2SizeY - 2).
    ue(log2_parallel_merge_level_minus2,
       0, (sps->log2_min_luma_coding_block_size_minus3 + 3 +
           sps->log2_diff_max_min_luma_coding_block_size - 2));

    flag(slice_segment_header_extension_present_flag);

    flag(pps_extension_present_flag);
    if (current->pps_extension_present_flag) {
        flag(pps_range_extension_flag);
        flag(pps_multilayer_extension_flag);
        flag(pps_3d_extension_flag);
        flag(pps_scc_extension_flag);
        ub(4, pps_extension_4bits);
    }
    if (current->pps_range_extension_flag)
        CHECK(FUNC(pps_range_extension)(ctx, rw, current));
    // Multilayer and 3D extensions are not implemented.
    if (current->pps_multilayer_extension_flag)
        return AVERROR_PATCHWELCOME;
    if (current->pps_3d_extension_flag)
        return AVERROR_PATCHWELCOME;
    if (current->pps_scc_extension_flag)
        CHECK(FUNC(pps_scc_extension)(ctx, rw, current));
    if (current->pps_extension_4bits)
        CHECK(FUNC(extension_data)(ctx, rw, &current->extension_data));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/*
 * Access unit delimiter syntax (access_unit_delimiter_rbsp(), H.265
 * section 7.3.2.5). Carries only pic_type, which constrains the slice
 * types that may appear in the access unit (0: I; 1: I/P; 2: I/P/B).
 */
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawAUD *current)
{
    int err;

    HEADER("Access Unit Delimiter");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_AUD));

    u(3, pic_type, 0, 2);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/*
 * Reference picture lists modification syntax (ref_pic_lists_modification(),
 * H.265 section 7.3.6.2). Optional explicit reordering of reference lists
 * L0 and (for B slices) L1.
 *
 * Only called when num_pic_total_curr > 1 (see the caller in
 * slice_segment_header), so the entry_size computation below is safe:
 * each list entry is coded in ceil(log2(num_pic_total_curr)) bits.
 */
static int FUNC(ref_pic_lists_modification)(CodedBitstreamContext *ctx, RWContext *rw,
                                            H265RawSliceHeader *current,
                                            unsigned int num_pic_total_curr)
{
    unsigned int entry_size;
    int err, i;

    entry_size = av_log2(num_pic_total_curr - 1) + 1;

    flag(ref_pic_list_modification_flag_l0);
    if (current->ref_pic_list_modification_flag_l0) {
        for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++)
            us(entry_size, list_entry_l0[i], 0, num_pic_total_curr - 1, 1, i);
    }

    // List 1 modification exists only for B slices.
    if (current->slice_type == HEVC_SLICE_B) {
        flag(ref_pic_list_modification_flag_l1);
        if (current->ref_pic_list_modification_flag_l1) {
            for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++)
                us(entry_size, list_entry_l1[i], 0, num_pic_total_curr - 1, 1, i);
        }
    }

    return 0;
}
/*
 * Weighted-prediction table syntax (pred_weight_table(), H.265 section
 * 7.3.6.3). Reads/writes the per-reference luma and chroma weights and
 * offsets for list 0, and for list 1 when the slice is a B slice.
 *
 * Note: the spec makes the per-entry weight flags conditional on the
 * reference picture not being the current picture in the same layer; this
 * implementation does not track reference picture identity, so those
 * conditions are hard-coded to true (see the `if (1 ...)` guards) and the
 * infer() branches are currently unreachable.
 */
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawSliceHeader *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps = h265->active_sps;
    int err, i, j;
    // Chroma weights exist only when the stream actually carries chroma
    // planes jointly with luma (not monochrome, not separate colour planes).
    int chroma = !sps->separate_colour_plane_flag &&
                  sps->chroma_format_idc != 0;

    ue(luma_log2_weight_denom, 0, 7);
    if (chroma)
        se(delta_chroma_log2_weight_denom, -7, 7);
    else
        infer(delta_chroma_log2_weight_denom, 0);

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        if (1 /* is not same POC and same layer_id */)
            flags(luma_weight_l0_flag[i], 1, i);
        else
            infer(luma_weight_l0_flag[i], 0);
    }
    if (chroma) {
        for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
            if (1 /* is not same POC and same layer_id */)
                flags(chroma_weight_l0_flag[i], 1, i);
            else
                infer(chroma_weight_l0_flag[i], 0);
        }
    }

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        if (current->luma_weight_l0_flag[i]) {
            ses(delta_luma_weight_l0[i], -128, +127, 1, i);
            // Luma offset range scales with the luma bit depth.
            ses(luma_offset_l0[i],
                -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)),
                ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i);
        } else {
            infer(delta_luma_weight_l0[i], 0);
            infer(luma_offset_l0[i], 0);
        }
        if (current->chroma_weight_l0_flag[i]) {
            for (j = 0; j < 2; j++) {
                // j = 0 is Cb, j = 1 is Cr.
                ses(delta_chroma_weight_l0[i][j], -128, +127, 2, i, j);
                ses(chroma_offset_l0[i][j],
                    -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)),
                    ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j);
            }
        } else {
            for (j = 0; j < 2; j++) {
                infer(delta_chroma_weight_l0[i][j], 0);
                infer(chroma_offset_l0[i][j], 0);
            }
        }
    }

    // List 1 weights mirror the list 0 structure, for B slices only.
    if (current->slice_type == HEVC_SLICE_B) {
        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */)
                flags(luma_weight_l1_flag[i], 1, i);
            else
                infer(luma_weight_l1_flag[i], 0);
        }
        if (chroma) {
            for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
                if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */)
                    flags(chroma_weight_l1_flag[i], 1, i);
                else
                    infer(chroma_weight_l1_flag[i], 0);
            }
        }

        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            if (current->luma_weight_l1_flag[i]) {
                ses(delta_luma_weight_l1[i], -128, +127, 1, i);
                ses(luma_offset_l1[i],
                    -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)),
                    ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i);
            } else {
                infer(delta_luma_weight_l1[i], 0);
                infer(luma_offset_l1[i], 0);
            }
            if (current->chroma_weight_l1_flag[i]) {
                for (j = 0; j < 2; j++) {
                    ses(delta_chroma_weight_l1[i][j], -128, +127, 2, i, j);
                    ses(chroma_offset_l1[i][j],
                        -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)),
                        ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j);
                }
            } else {
                for (j = 0; j < 2; j++) {
                    infer(delta_chroma_weight_l1[i][j], 0);
                    infer(chroma_offset_l1[i][j], 0);
                }
            }
        }
    }

    return 0;
}
/*
 * Slice segment header syntax (slice_segment_header(), H.265 section
 * 7.3.6.1). Resolves and activates the referenced PPS and SPS, derives the
 * CTB geometry needed for the slice segment address and entry-point bounds,
 * then reads/writes the full header: reference picture set selection,
 * long-term reference pictures, reference list sizes and modification,
 * prediction weights, QP offsets, deblocking/SAO controls, entry points and
 * any header extension bytes, ending with byte alignment.
 *
 * num_pic_total_curr accumulates NumPicTotalCurr (spec eq. 7-57) from the
 * active short-term RPS, the used long-term pictures and current-picture
 * referencing; it gates ref_pic_lists_modification below.
 *
 * Returns 0 on success, AVERROR_INVALIDDATA when the referenced PPS/SPS is
 * missing, or AVERROR_PATCHWELCOME for more entry points than supported.
 */
static int FUNC(slice_segment_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H265RawSliceHeader *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps;
    const H265RawPPS *pps;
    unsigned int min_cb_log2_size_y, ctb_log2_size_y, ctb_size_y;
    unsigned int pic_width_in_ctbs_y, pic_height_in_ctbs_y, pic_size_in_ctbs_y;
    unsigned int num_pic_total_curr = 0;
    int err, i;

    HEADER("Slice Segment Header");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, -1));

    flag(first_slice_segment_in_pic_flag);

    // no_output_of_prior_pics_flag is present only for IRAP pictures
    // (BLA_W_LP .. RSV_IRAP_VCL23).
    if (current->nal_unit_header.nal_unit_type >= HEVC_NAL_BLA_W_LP &&
        current->nal_unit_header.nal_unit_type <= HEVC_NAL_RSV_IRAP_VCL23)
        flag(no_output_of_prior_pics_flag);

    ue(slice_pic_parameter_set_id, 0, 63);
    pps = h265->pps[current->slice_pic_parameter_set_id];
    if (!pps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
               current->slice_pic_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_pps = pps;

    sps = h265->sps[pps->pps_seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               pps->pps_seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_sps = sps;

    // Derive CTB geometry (spec eqs. 7-10..7-17) for the address fields.
    min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3;
    ctb_log2_size_y = min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size;
    ctb_size_y = 1 << ctb_log2_size_y;
    pic_width_in_ctbs_y =
        (sps->pic_width_in_luma_samples + ctb_size_y - 1) / ctb_size_y;
    pic_height_in_ctbs_y =
        (sps->pic_height_in_luma_samples + ctb_size_y - 1) / ctb_size_y;
    pic_size_in_ctbs_y = pic_width_in_ctbs_y * pic_height_in_ctbs_y;

    if (!current->first_slice_segment_in_pic_flag) {
        // slice_segment_address is coded in ceil(log2(PicSizeInCtbsY)) bits.
        unsigned int address_size = av_log2(pic_size_in_ctbs_y - 1) + 1;
        if (pps->dependent_slice_segments_enabled_flag)
            flag(dependent_slice_segment_flag);
        else
            infer(dependent_slice_segment_flag, 0);
        u(address_size, slice_segment_address, 0, pic_size_in_ctbs_y - 1);
    } else {
        infer(dependent_slice_segment_flag, 0);
    }

    // Independent slice segments carry the full header; dependent segments
    // inherit everything below from the preceding independent segment.
    if (!current->dependent_slice_segment_flag) {
        for (i = 0; i < pps->num_extra_slice_header_bits; i++)
            flags(slice_reserved_flag[i], 1, i);

        ue(slice_type, 0, 2);

        if (pps->output_flag_present_flag)
            flag(pic_output_flag);

        if (sps->separate_colour_plane_flag)
            u(2, colour_plane_id, 0, 2);

        // POC and reference picture sets are absent for IDR pictures.
        if (current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_W_RADL &&
            current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_N_LP) {
            const H265RawSTRefPicSet *rps;

            ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, slice_pic_order_cnt_lsb);

            // The short-term RPS is either coded inline in the header or
            // selected by index from the candidate sets in the SPS.
            flag(short_term_ref_pic_set_sps_flag);
            if (!current->short_term_ref_pic_set_sps_flag) {
                CHECK(FUNC(st_ref_pic_set)(ctx, rw, &current->short_term_ref_pic_set,
                                           sps->num_short_term_ref_pic_sets, sps));
                rps = &current->short_term_ref_pic_set;
            } else if (sps->num_short_term_ref_pic_sets > 1) {
                unsigned int idx_size = av_log2(sps->num_short_term_ref_pic_sets - 1) + 1;
                u(idx_size, short_term_ref_pic_set_idx,
                  0, sps->num_short_term_ref_pic_sets - 1);
                rps = &sps->st_ref_pic_set[current->short_term_ref_pic_set_idx];
            } else {
                infer(short_term_ref_pic_set_idx, 0);
                rps = &sps->st_ref_pic_set[0];
            }

            // Count the short-term pictures used by the current picture.
            num_pic_total_curr = 0;
            for (i = 0; i < rps->num_negative_pics; i++)
                if (rps->used_by_curr_pic_s0_flag[i])
                    ++num_pic_total_curr;
            for (i = 0; i < rps->num_positive_pics; i++)
                if (rps->used_by_curr_pic_s1_flag[i])
                    ++num_pic_total_curr;

            if (sps->long_term_ref_pics_present_flag) {
                unsigned int idx_size;

                if (sps->num_long_term_ref_pics_sps > 0) {
                    ue(num_long_term_sps, 0, sps->num_long_term_ref_pics_sps);
                    idx_size = av_log2(sps->num_long_term_ref_pics_sps - 1) + 1;
                } else {
                    infer(num_long_term_sps, 0);
                    idx_size = 0;
                }
                ue(num_long_term_pics, 0, HEVC_MAX_REFS - current->num_long_term_sps);

                // First the SPS-selected long-term pictures, then those
                // coded explicitly in the header.
                for (i = 0; i < current->num_long_term_sps +
                                current->num_long_term_pics; i++) {
                    if (i < current->num_long_term_sps) {
                        if (sps->num_long_term_ref_pics_sps > 1)
                            us(idx_size, lt_idx_sps[i],
                               0, sps->num_long_term_ref_pics_sps - 1, 1, i);
                        if (sps->used_by_curr_pic_lt_sps_flag[current->lt_idx_sps[i]])
                            ++num_pic_total_curr;
                    } else {
                        ubs(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, poc_lsb_lt[i], 1, i);
                        flags(used_by_curr_pic_lt_flag[i], 1, i);
                        if (current->used_by_curr_pic_lt_flag[i])
                            ++num_pic_total_curr;
                    }
                    flags(delta_poc_msb_present_flag[i], 1, i);
                    if (current->delta_poc_msb_present_flag[i])
                        ues(delta_poc_msb_cycle_lt[i], 0, UINT32_MAX - 1, 1, i);
                    else
                        infer(delta_poc_msb_cycle_lt[i], 0);
                }
            }

            if (sps->sps_temporal_mvp_enabled_flag)
                flag(slice_temporal_mvp_enabled_flag);
            else
                infer(slice_temporal_mvp_enabled_flag, 0);

            // SCC: the current picture itself may be a reference.
            if (pps->pps_curr_pic_ref_enabled_flag)
                ++num_pic_total_curr;
        }

        if (sps->sample_adaptive_offset_enabled_flag) {
            flag(slice_sao_luma_flag);
            if (!sps->separate_colour_plane_flag && sps->chroma_format_idc != 0)
                flag(slice_sao_chroma_flag);
            else
                infer(slice_sao_chroma_flag, 0);
        } else {
            infer(slice_sao_luma_flag, 0);
            infer(slice_sao_chroma_flag, 0);
        }

        // Inter-prediction controls for P and B slices.
        if (current->slice_type == HEVC_SLICE_P ||
            current->slice_type == HEVC_SLICE_B) {
            flag(num_ref_idx_active_override_flag);
            if (current->num_ref_idx_active_override_flag) {
                ue(num_ref_idx_l0_active_minus1, 0, 14);
                if (current->slice_type == HEVC_SLICE_B)
                    ue(num_ref_idx_l1_active_minus1, 0, 14);
                else
                    infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1);
            } else {
                infer(num_ref_idx_l0_active_minus1, pps->num_ref_idx_l0_default_active_minus1);
                infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1);
            }

            if (pps->lists_modification_present_flag && num_pic_total_curr > 1)
                CHECK(FUNC(ref_pic_lists_modification)(ctx, rw, current,
                                                       num_pic_total_curr));

            if (current->slice_type == HEVC_SLICE_B)
                flag(mvd_l1_zero_flag);
            if (pps->cabac_init_present_flag)
                flag(cabac_init_flag);
            else
                infer(cabac_init_flag, 0);

            if (current->slice_temporal_mvp_enabled_flag) {
                // Select which list the collocated picture for TMVP
                // comes from, and its index within that list.
                if (current->slice_type == HEVC_SLICE_B)
                    flag(collocated_from_l0_flag);
                else
                    infer(collocated_from_l0_flag, 1);
                if (current->collocated_from_l0_flag) {
                    if (current->num_ref_idx_l0_active_minus1 > 0)
                        ue(collocated_ref_idx, 0, current->num_ref_idx_l0_active_minus1);
                    else
                        infer(collocated_ref_idx, 0);
                } else {
                    if (current->num_ref_idx_l1_active_minus1 > 0)
                        ue(collocated_ref_idx, 0, current->num_ref_idx_l1_active_minus1);
                    else
                        infer(collocated_ref_idx, 0);
                }
            }

            if ((pps->weighted_pred_flag   && current->slice_type == HEVC_SLICE_P) ||
                (pps->weighted_bipred_flag && current->slice_type == HEVC_SLICE_B))
                CHECK(FUNC(pred_weight_table)(ctx, rw, current));

            ue(five_minus_max_num_merge_cand, 0, 4);
            if (sps->motion_vector_resolution_control_idc == 2)
                flag(use_integer_mv_flag);
            else
                infer(use_integer_mv_flag, sps->motion_vector_resolution_control_idc);
        }

        // Bounds keep SliceQpY = 26 + init_qp_minus26 + slice_qp_delta
        // within [-QpBdOffsetY, 51].
        se(slice_qp_delta,
           - 6 * sps->bit_depth_luma_minus8 - (pps->init_qp_minus26 + 26),
           + 51 - (pps->init_qp_minus26 + 26));
        if (pps->pps_slice_chroma_qp_offsets_present_flag) {
            se(slice_cb_qp_offset, -12, +12);
            se(slice_cr_qp_offset, -12, +12);
        } else {
            infer(slice_cb_qp_offset, 0);
            infer(slice_cr_qp_offset, 0);
        }
        if (pps->pps_slice_act_qp_offsets_present_flag) {
            // Per-slice ACT offsets: PPS offset + slice offset must stay
            // within [-12, +12].
            se(slice_act_y_qp_offset,
               -12 - (pps->pps_act_y_qp_offset_plus5 - 5),
               +12 - (pps->pps_act_y_qp_offset_plus5 - 5));
            se(slice_act_cb_qp_offset,
               -12 - (pps->pps_act_cb_qp_offset_plus5 - 5),
               +12 - (pps->pps_act_cb_qp_offset_plus5 - 5));
            se(slice_act_cr_qp_offset,
               -12 - (pps->pps_act_cr_qp_offset_plus3 - 3),
               +12 - (pps->pps_act_cr_qp_offset_plus3 - 3));
        } else {
            infer(slice_act_y_qp_offset, 0);
            infer(slice_act_cb_qp_offset, 0);
            infer(slice_act_cr_qp_offset, 0);
        }
        if (pps->chroma_qp_offset_list_enabled_flag)
            flag(cu_chroma_qp_offset_enabled_flag);
        else
            infer(cu_chroma_qp_offset_enabled_flag, 0);

        if (pps->deblocking_filter_override_enabled_flag)
            flag(deblocking_filter_override_flag);
        else
            infer(deblocking_filter_override_flag, 0);
        if (current->deblocking_filter_override_flag) {
            flag(slice_deblocking_filter_disabled_flag);
            if (!current->slice_deblocking_filter_disabled_flag) {
                se(slice_beta_offset_div2, -6, +6);
                se(slice_tc_offset_div2, -6, +6);
            } else {
                infer(slice_beta_offset_div2, pps->pps_beta_offset_div2);
                infer(slice_tc_offset_div2, pps->pps_tc_offset_div2);
            }
        } else {
            infer(slice_deblocking_filter_disabled_flag,
                  pps->pps_deblocking_filter_disabled_flag);
            infer(slice_beta_offset_div2, pps->pps_beta_offset_div2);
            infer(slice_tc_offset_div2, pps->pps_tc_offset_div2);
        }

        // The cross-slice loop-filter flag is only coded when some in-loop
        // filter is actually active for this slice.
        if (pps->pps_loop_filter_across_slices_enabled_flag &&
            (current->slice_sao_luma_flag || current->slice_sao_chroma_flag ||
             !current->slice_deblocking_filter_disabled_flag))
            flag(slice_loop_filter_across_slices_enabled_flag);
        else
            infer(slice_loop_filter_across_slices_enabled_flag,
                  pps->pps_loop_filter_across_slices_enabled_flag);
    }

    if (pps->tiles_enabled_flag || pps->entropy_coding_sync_enabled_flag) {
        // Entry-point limit depends on which of tiles / WPP are enabled
        // (one entry per CTB row, per tile, or per tile-row intersection).
        unsigned int num_entry_point_offsets_limit;
        if (!pps->tiles_enabled_flag && pps->entropy_coding_sync_enabled_flag)
            num_entry_point_offsets_limit = pic_height_in_ctbs_y - 1;
        else if (pps->tiles_enabled_flag && !pps->entropy_coding_sync_enabled_flag)
            num_entry_point_offsets_limit =
                (pps->num_tile_columns_minus1 + 1) * (pps->num_tile_rows_minus1 + 1);
        else
            num_entry_point_offsets_limit =
                (pps->num_tile_columns_minus1 + 1) * pic_height_in_ctbs_y - 1;
        ue(num_entry_point_offsets, 0, num_entry_point_offsets_limit);

        if (current->num_entry_point_offsets > HEVC_MAX_ENTRY_POINT_OFFSETS) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many entry points: "
                   "%"PRIu16".\n", current->num_entry_point_offsets);
            return AVERROR_PATCHWELCOME;
        }

        if (current->num_entry_point_offsets > 0) {
            ue(offset_len_minus1, 0, 31);
            for (i = 0; i < current->num_entry_point_offsets; i++)
                ubs(current->offset_len_minus1 + 1, entry_point_offset_minus1[i], 1, i);
        }
    }

    if (pps->slice_segment_header_extension_present_flag) {
        ue(slice_segment_header_extension_length, 0, 256);
        for (i = 0; i < current->slice_segment_header_extension_length; i++)
            us(8, slice_segment_header_extension_data_byte[i], 0x00, 0xff, 1, i);
    }

    CHECK(FUNC(byte_alignment)(ctx, rw));

    return 0;
}
/*
 * Buffering period SEI message syntax (buffering_period(), H.265 section
 * D.2.2). Requires the referenced SPS to carry HRD parameters; reads/writes
 * the initial CPB removal delays/offsets for the NAL and/or VCL HRD.
 *
 * The trailing use_alt_cpb_params_flag is handled asymmetrically:
 * - READ: it is only parsed if payload extension data actually remains
 *   (cbs_h265_payload_extension_present), otherwise inferred as 0.
 * - WRITE: it is emitted when set or when further payload data follows;
 *   *more_data is then forced to 1 so the bit is never the final bit of
 *   the payload and cannot be misread as payload_bit_equal_to_one.
 */
static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H265RawSEIBufferingPeriod *current,
                                      uint32_t *payload_size,
                                      int *more_data)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps;
    const H265RawHRDParameters *hrd;
    int err, i, length;

#ifdef READ
    // Track the bit position to detect remaining payload extension data.
    int start_pos, end_pos;
    start_pos = get_bits_count(rw);
#endif

    HEADER("Buffering Period");

    ue(bp_seq_parameter_set_id, 0, HEVC_MAX_SPS_COUNT - 1);
    sps = h265->sps[current->bp_seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->bp_seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_sps = sps;

    if (!sps->vui_parameters_present_flag ||
        !sps->vui.vui_hrd_parameters_present_flag) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires "
               "HRD parameters to be present in SPS.\n");
        return AVERROR_INVALIDDATA;
    }

    hrd = &sps->vui.hrd_parameters;
    if (!hrd->nal_hrd_parameters_present_flag &&
        !hrd->vcl_hrd_parameters_present_flag) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Buffering period SEI requires "
               "NAL or VCL HRD parameters to be present.\n");
        return AVERROR_INVALIDDATA;
    }

    if (!hrd->sub_pic_hrd_params_present_flag)
        flag(irap_cpb_params_present_flag);
    else
        infer(irap_cpb_params_present_flag, 0);

    if (current->irap_cpb_params_present_flag) {
        // Field widths come from the HRD parameters in the SPS.
        length = hrd->au_cpb_removal_delay_length_minus1 + 1;
        ub(length, cpb_delay_offset);
        length = hrd->dpb_output_delay_length_minus1 + 1;
        ub(length, dpb_delay_offset);
    } else {
        infer(cpb_delay_offset, 0);
        infer(dpb_delay_offset, 0);
    }

    flag(concatenation_flag);

    length = hrd->au_cpb_removal_delay_length_minus1 + 1;
    ub(length, au_cpb_removal_delay_delta_minus1);

    // One set of initial delays per CPB of the lowest sub-layer, for each
    // HRD type present; alternate values are added for sub-picture HRD or
    // IRAP-specific CPB parameters.
    if (hrd->nal_hrd_parameters_present_flag) {
        for (i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) {
            length = hrd->initial_cpb_removal_delay_length_minus1 + 1;

            ubs(length, nal_initial_cpb_removal_delay[i], 1, i);
            ubs(length, nal_initial_cpb_removal_offset[i], 1, i);

            if (hrd->sub_pic_hrd_params_present_flag ||
                current->irap_cpb_params_present_flag) {
                ubs(length, nal_initial_alt_cpb_removal_delay[i], 1, i);
                ubs(length, nal_initial_alt_cpb_removal_offset[i], 1, i);
            }
        }
    }
    if (hrd->vcl_hrd_parameters_present_flag) {
        for (i = 0; i <= hrd->cpb_cnt_minus1[0]; i++) {
            length = hrd->initial_cpb_removal_delay_length_minus1 + 1;

            ubs(length, vcl_initial_cpb_removal_delay[i], 1, i);
            ubs(length, vcl_initial_cpb_removal_offset[i], 1, i);

            if (hrd->sub_pic_hrd_params_present_flag ||
                current->irap_cpb_params_present_flag) {
                ubs(length, vcl_initial_alt_cpb_removal_delay[i], 1, i);
                ubs(length, vcl_initial_alt_cpb_removal_offset[i], 1, i);
            }
        }
    }

#ifdef READ
    end_pos = get_bits_count(rw);
    if (cbs_h265_payload_extension_present(rw, *payload_size,
                                           end_pos - start_pos))
        flag(use_alt_cpb_params_flag);
    else
        infer(use_alt_cpb_params_flag, 0);
#else
    // If unknown extension data exists, then use_alt_cpb_params_flag is
    // coded in the bitstream and must be written even if it's 0.
    if (current->use_alt_cpb_params_flag || *more_data) {
        flag(use_alt_cpb_params_flag);
        // Ensure this bit is not the last in the payload by making the
        // more_data_in_payload() check evaluate to true, so it may not
        // be mistaken as something else by decoders.
        *more_data = 1;
    }
#endif

    return 0;
}
// Read/write a Picture Timing SEI message (H.265 section D.2.3 / D.3.3).
// Requires an active SPS: its VUI flags decide which fields are actually
// present in the bitstream.  Returns 0 on success or a negative AVERROR.
static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawSEIPicTiming *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps;
    const H265RawHRDParameters *hrd;
    int err, expected_source_scan_type, i, length;

    HEADER("Picture Timing");

    sps = h265->active_sps;
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "No active SPS for pic_timing.\n");
        return AVERROR_INVALIDDATA;
    }

    // Derived from the PTL source flags: 0 = interlaced, 1 = progressive,
    // 2 = unspecified; a negative value means the flag combination is
    // contradictory, so no single scan type can be expected.
    expected_source_scan_type = 2 -
        2 * sps->profile_tier_level.general_interlaced_source_flag -
        sps->profile_tier_level.general_progressive_source_flag;

    if (sps->vui.frame_field_info_present_flag) {
        u(4, pic_struct, 0, 12);
        // Constrain the coded value to the expected scan type when it is
        // well-defined, otherwise allow the full 0..2 range.
        u(2, source_scan_type,
          expected_source_scan_type >= 0 ? expected_source_scan_type : 0,
          expected_source_scan_type >= 0 ? expected_source_scan_type : 2);
        flag(duplicate_flag);
    } else {
        // Fields absent from the bitstream: infer the spec-mandated defaults.
        infer(pic_struct, 0);
        infer(source_scan_type,
              expected_source_scan_type >= 0 ? expected_source_scan_type : 2);
        infer(duplicate_flag, 0);
    }

    if (sps->vui_parameters_present_flag &&
        sps->vui.vui_hrd_parameters_present_flag)
        hrd = &sps->vui.hrd_parameters;
    else
        hrd = NULL;
    if (hrd && (hrd->nal_hrd_parameters_present_flag ||
                hrd->vcl_hrd_parameters_present_flag)) {
        // Field widths below are configured by the HRD parameters in the SPS.
        length = hrd->au_cpb_removal_delay_length_minus1 + 1;
        ub(length, au_cpb_removal_delay_minus1);

        length = hrd->dpb_output_delay_length_minus1 + 1;
        ub(length, pic_dpb_output_delay);

        if (hrd->sub_pic_hrd_params_present_flag) {
            length = hrd->dpb_output_delay_du_length_minus1 + 1;
            ub(length, pic_dpb_output_du_delay);
        }

        if (hrd->sub_pic_hrd_params_present_flag &&
            hrd->sub_pic_cpb_params_in_pic_timing_sei_flag) {
            // Each decoding unit must contain at least one slice segment.
            ue(num_decoding_units_minus1, 0, HEVC_MAX_SLICE_SEGMENTS);
            flag(du_common_cpb_removal_delay_flag);
            length = hrd->du_cpb_removal_delay_increment_length_minus1 + 1;
            if (current->du_common_cpb_removal_delay_flag)
                ub(length, du_common_cpb_removal_delay_increment_minus1);
            for (i = 0; i <= current->num_decoding_units_minus1; i++) {
                ues(num_nalus_in_du_minus1[i],
                    0, HEVC_MAX_SLICE_SEGMENTS, 1, i);
                // Per-DU delay is coded only without a common delay, and is
                // omitted for the last decoding unit.
                if (!current->du_common_cpb_removal_delay_flag &&
                    i < current->num_decoding_units_minus1)
                    ubs(length, du_cpb_removal_delay_increment_minus1[i], 1, i);
            }
        }
    }

    return 0;
}
// Read/write a Pan-Scan Rectangle SEI message (H.265 section D.2.4).
// When the cancel flag is set, no rectangle data follows.
static int FUNC(sei_pan_scan_rect)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawSEIPanScanRect *current)
{
    int err, i;

    HEADER("Pan-Scan Rectangle");

    ue(pan_scan_rect_id, 0, UINT32_MAX - 1);
    flag(pan_scan_rect_cancel_flag);

    if (!current->pan_scan_rect_cancel_flag) {
        // At most three rectangles (pan_scan_cnt_minus1 <= 2 per the spec).
        ue(pan_scan_cnt_minus1, 0, 2);
        for (i = 0; i <= current->pan_scan_cnt_minus1; i++) {
            // Offsets are signed Exp-Golomb; INT32_MIN itself is not
            // representable by se(v), hence the +1 on the lower bound.
            ses(pan_scan_rect_left_offset[i],   INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_right_offset[i],  INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_top_offset[i],    INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i);
        }
        flag(pan_scan_rect_persistence_flag);
    }

    return 0;
}
// Read/write a User Data Registered (ITU-T T.35) SEI message.
// On read, data_length is derived from the payload size minus the one or two
// country-code bytes; on write, *payload_size is recomputed from data_length.
static int FUNC(sei_user_data_registered)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H265RawSEIUserDataRegistered *current,
                                          uint32_t *payload_size)
{
    int err, i, j;

    HEADER("User Data Registered ITU-T T.35");

    u(8, itu_t_t35_country_code, 0x00, 0xff);
    // Country code 0xff means an extension byte follows; i counts the
    // header bytes consumed so far.
    if (current->itu_t_t35_country_code != 0xff)
        i = 1;
    else {
        u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff);
        i = 2;
    }

#ifdef READ
    if (*payload_size < i) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data registered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - i;
#else
    *payload_size = i + current->data_length;
#endif

    allocate(current->data, current->data_length);
    // Trace index (i + j) is the byte position within the whole payload,
    // counting the country-code header bytes.
    for (j = 0; j < current->data_length; j++)
        xu(8, itu_t_t35_payload_byte[i], current->data[j], 0x00, 0xff, 1, i + j);

    return 0;
}
// Read/write a User Data Unregistered SEI message: a 16-byte UUID followed
// by arbitrary payload bytes.  The payload must be at least 16 bytes long.
static int FUNC(sei_user_data_unregistered)(CodedBitstreamContext *ctx, RWContext *rw,
                                            H265RawSEIUserDataUnregistered *current,
                                            uint32_t *payload_size)
{
    int err, i;

    HEADER("User Data Unregistered");

#ifdef READ
    if (*payload_size < 16) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data unregistered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - 16;
#else
    *payload_size = 16 + current->data_length;
#endif

    for (i = 0; i < 16; i++)
        us(8, uuid_iso_iec_11578[i], 0x00, 0xff, 1, i);

    allocate(current->data, current->data_length);

    for (i = 0; i < current->data_length; i++)
        xu(8, user_data_payload_byte[i], current->data[i], 0x00, 0xff, 1, i);

    return 0;
}
// Read/write a Recovery Point SEI message (H.265 section D.2.8): a signed
// POC count to the recovery point plus two flags.
static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw,
                                    H265RawSEIRecoveryPoint *current)
{
    int err;

    HEADER("Recovery Point");

    se(recovery_poc_cnt, -32768, 32767);

    flag(exact_match_flag);
    flag(broken_link_flag);

    return 0;
}
// Read/write a Display Orientation SEI message: flip flags and a 16-bit
// anticlockwise rotation, all skipped when the cancel flag is set.
static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H265RawSEIDisplayOrientation *current)
{
    int err;

    HEADER("Display Orientation");

    flag(display_orientation_cancel_flag);
    if (!current->display_orientation_cancel_flag) {
        flag(hor_flip);
        flag(ver_flip);
        ub(16, anticlockwise_rotation);
        flag(display_orientation_persistence_flag);
    }

    return 0;
}
// Read/write an Active Parameter Sets SEI message and update the parsing
// context: sets h265->active_vps and, via the base layer's SPS index,
// h265->active_sps for subsequent messages in the same access unit.
static int FUNC(sei_active_parameter_sets)(CodedBitstreamContext *ctx, RWContext *rw,
                                           H265RawSEIActiveParameterSets *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawVPS *vps;
    int err, i;

    HEADER("Active Parameter Sets");

    // NOTE(review): the upper bound is HEVC_MAX_VPS_COUNT, but a 4-bit field
    // cannot exceed HEVC_MAX_VPS_COUNT - 1 anyway, so the range is harmless.
    u(4, active_video_parameter_set_id, 0, HEVC_MAX_VPS_COUNT);
    vps = h265->vps[current->active_video_parameter_set_id];
    if (!vps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "VPS id %d not available for active "
               "parameter sets.\n", current->active_video_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_vps = vps;

    flag(self_contained_cvs_flag);
    flag(no_parameter_set_update_flag);

    ue(num_sps_ids_minus1, 0, HEVC_MAX_SPS_COUNT - 1);
    for (i = 0; i <= current->num_sps_ids_minus1; i++)
        ues(active_seq_parameter_set_id[i], 0, HEVC_MAX_SPS_COUNT - 1, 1, i);

    // One SPS index per layer; layer 0 (when internal) selects the SPS that
    // becomes the active SPS for this context.
    for (i = vps->vps_base_layer_internal_flag;
        i <= FFMIN(62, vps->vps_max_layers_minus1); i++) {
        ues(layer_sps_idx[i], 0, current->num_sps_ids_minus1, 1, i);

        if (i == 0)
            h265->active_sps = h265->sps[current->active_seq_parameter_set_id[current->layer_sps_idx[0]]];
    }

    return 0;
}
// Read/write a Decoded Picture Hash SEI message.  The number of colour
// planes (1 or 3) depends on chroma_format_idc from the active SPS, and the
// hash payload per plane depends on hash_type (0 = MD5, 1 = CRC, 2 = checksum).
static int FUNC(sei_decoded_picture_hash)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H265RawSEIDecodedPictureHash *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps = h265->active_sps;
    int err, c, i;

    HEADER("Decoded Picture Hash");

    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "No active SPS for decoded picture hash.\n");
        return AVERROR_INVALIDDATA;
    }

    u(8, hash_type, 0, 2);

    // Monochrome streams carry a hash for the luma plane only.
    for (c = 0; c < (sps->chroma_format_idc == 0 ? 1 : 3); c++) {
        if (current->hash_type == 0) {
            for (i = 0; i < 16; i++)
                us(8, picture_md5[c][i], 0x00, 0xff, 2, c, i);
        } else if (current->hash_type == 1) {
            us(16, picture_crc[c], 0x0000, 0xffff, 1, c);
        } else if (current->hash_type == 2) {
            us(32, picture_checksum[c], 0x00000000, 0xffffffff, 1, c);
        }
    }

    return 0;
}
// Read/write a Time Code SEI message (H.265 section D.2.27): up to three
// clock timestamps, each with either a full HH:MM:SS or a nested
// seconds/minutes/hours presence hierarchy, plus an optional time offset.
static int FUNC(sei_time_code)(CodedBitstreamContext *ctx, RWContext *rw,
                               H265RawSEITimeCode *current)
{
    int err, i;

    HEADER("Time Code");

    u(2, num_clock_ts, 1, 3);

    for (i = 0; i < current->num_clock_ts; i++) {
        flags(clock_timestamp_flag[i], 1, i);

        if (current->clock_timestamp_flag[i]) {
            flags(units_field_based_flag[i], 1, i);
            us(5, counting_type[i], 0, 6, 1, i);
            flags(full_timestamp_flag[i], 1, i);
            flags(discontinuity_flag[i], 1, i);
            flags(cnt_dropped_flag[i], 1, i);

            ubs(9, n_frames[i], 1, i);

            if (current->full_timestamp_flag[i]) {
                us(6, seconds_value[i], 0, 59, 1, i);
                us(6, minutes_value[i], 0, 59, 1, i);
                us(5, hours_value[i],   0, 23, 1, i);
            } else {
                // Each finer-grained value is present only when its flag is
                // set, and each coarser value only inside the finer one.
                flags(seconds_flag[i], 1, i);
                if (current->seconds_flag[i]) {
                    us(6, seconds_value[i], 0, 59, 1, i);
                    flags(minutes_flag[i], 1, i);
                    if (current->minutes_flag[i]) {
                        us(6, minutes_value[i], 0, 59, 1, i);
                        flags(hours_flag[i], 1, i);
                        if (current->hours_flag[i])
                            us(5, hours_value[i], 0, 23, 1, i);
                    }
                }
            }

            ubs(5, time_offset_length[i], 1, i);
            if (current->time_offset_length[i] > 0)
                ibs(current->time_offset_length[i], time_offset_value[i], 1, i);
            else
                infer(time_offset_value[i], 0);
        }
    }

    return 0;
}
// Read/write a Mastering Display Colour Volume SEI message: three display
// primaries, the white point (all in 0.00002 units, capped at 50000), and
// the min/max mastering luminance, with min constrained below max.
static int FUNC(sei_mastering_display)(CodedBitstreamContext *ctx, RWContext *rw,
                                       H265RawSEIMasteringDisplayColourVolume *current)
{
    int err, c;

    HEADER("Mastering Display Colour Volume");

    for (c = 0; c < 3; c++) {
        us(16, display_primaries_x[c], 0, 50000, 1, c);
        us(16, display_primaries_y[c], 0, 50000, 1, c);
    }

    u(16, white_point_x, 0, 50000);
    u(16, white_point_y, 0, 50000);

    u(32, max_display_mastering_luminance,
      1, MAX_UINT_BITS(32));
    u(32, min_display_mastering_luminance,
      0, current->max_display_mastering_luminance - 1);

    return 0;
}
// Read/write a Content Light Level Info SEI message: two 16-bit values
// (maximum and maximum-frame-average light level, in cd/m^2).
static int FUNC(sei_content_light_level)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H265RawSEIContentLightLevelInfo *current)
{
    int err;

    HEADER("Content Light Level");

    ub(16, max_content_light_level);
    ub(16, max_pic_average_light_level);

    return 0;
}
// Read/write an Alternative Transfer Characteristics SEI message: a single
// byte giving the preferred transfer characteristics code.
static int FUNC(sei_alternative_transfer_characteristics)(CodedBitstreamContext *ctx,
                                                          RWContext *rw,
                                                          H265RawSEIAlternativeTransferCharacteristics *current)
{
    int err;

    HEADER("Alternative Transfer Characteristics");

    ub(8, preferred_transfer_characteristics);

    return 0;
}
// Read/write an Alpha Channel Information SEI message.  The transparent and
// opaque sample values are coded with a width derived from the alpha bit
// depth; when cancelled, the spec-defined defaults are inferred instead.
static int FUNC(sei_alpha_channel_info)(CodedBitstreamContext *ctx,
                                        RWContext *rw,
                                        H265RawSEIAlphaChannelInfo *current)
{
    int err, length;

    HEADER("Alpha Channel Information");

    flag(alpha_channel_cancel_flag);
    if (!current->alpha_channel_cancel_flag) {
        ub(3, alpha_channel_use_idc);
        ub(3, alpha_channel_bit_depth_minus8);
        // Sample values are (bit_depth_minus8 + 8) + 1 bits wide.
        length = current->alpha_channel_bit_depth_minus8 + 9;
        ub(length, alpha_transparent_value);
        ub(length, alpha_opaque_value);
        flag(alpha_channel_incr_flag);
        flag(alpha_channel_clip_flag);
        if (current->alpha_channel_clip_flag)
            flag(alpha_channel_clip_type_flag);
    } else {
        infer(alpha_channel_use_idc,   2);
        infer(alpha_channel_incr_flag, 0);
        infer(alpha_channel_clip_flag, 0);
    }

    return 0;
}
// Read/write unknown trailing SEI payload extension data
// (reserved_payload_extension_data).  On read, locates the final
// payload_bit_equal_to_one in the last byte of the payload to determine how
// many extension bits precede it, then captures them into current->data.
static int FUNC(payload_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawExtensionData *current, uint32_t payload_size,
                                   int cur_pos)
{
    int err;
    size_t byte_length, k;
#ifdef READ
    GetBitContext tmp;
    int bits_left, payload_zero_bits;

    if (!cbs_h265_payload_extension_present(rw, payload_size, cur_pos))
        return 0;

    bits_left = 8 * payload_size - cur_pos;
    // Peek at the last (up to 8) bits of the payload without consuming them.
    tmp = *rw;
    if (bits_left > 8)
        skip_bits_long(&tmp, bits_left - 8);
    payload_zero_bits = get_bits(&tmp, FFMIN(bits_left, 8));
    if (!payload_zero_bits)
        return AVERROR_INVALIDDATA;
    // Trailing zeros after the final 1 bit are alignment; everything before
    // the stop bit is extension data.
    payload_zero_bits = ff_ctz(payload_zero_bits);
    current->bit_length = bits_left - payload_zero_bits - 1;
    allocate(current->data, (current->bit_length + 7) / 8);
#endif

    byte_length = (current->bit_length + 7) / 8;
    for (k = 0; k < byte_length; k++) {
        // The last byte may be partial.
        int length = FFMIN(current->bit_length - k * 8, 8);
        xu(length, reserved_payload_extension_data, current->data[k],
           0, MAX_UINT_BITS(length), 0);
    }

    return 0;
}
// Read/write a single SEI payload body, dispatching on payload_type, then
// handle more_data_in_payload(): any captured extension data plus the
// bit_equal_to_one / alignment trailer.  On write, payload_size is updated
// to the number of bytes actually produced.
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
                             H265RawSEIPayload *current, int prefix)
{
    int err, i;
    int start_position, current_position;
    // Nonzero when unknown extension bits were captured on read and must be
    // re-emitted on write (forces the trailer path below).
    int more_data = !!current->extension_data.bit_length;

#ifdef READ
    start_position = get_bits_count(rw);
#else
    start_position = put_bits_count(rw);
#endif

    switch (current->payload_type) {
    // Reject payload types appearing in the wrong (prefix/suffix) SEI NAL.
#define SEI_TYPE_CHECK_VALID(name, prefix_valid, suffix_valid) do { \
            if (prefix && !prefix_valid) { \
                av_log(ctx->log_ctx, AV_LOG_ERROR, "SEI type %s invalid " \
                       "as prefix SEI!\n", #name); \
                return AVERROR_INVALIDDATA; \
            } \
            if (!prefix && !suffix_valid) { \
                av_log(ctx->log_ctx, AV_LOG_ERROR, "SEI type %s invalid " \
                       "as suffix SEI!\n", #name); \
                return AVERROR_INVALIDDATA; \
            } \
        } while (0)
    // _N: plain payload; _S: payload needing payload_size; _E: payload also
    // needing the more_data flag (may carry trailing extension data).
#define SEI_TYPE_N(type, prefix_valid, suffix_valid, name) \
    case HEVC_SEI_TYPE_ ## type: \
        SEI_TYPE_CHECK_VALID(name, prefix_valid, suffix_valid); \
        CHECK(FUNC(sei_ ## name)(ctx, rw, &current->payload.name)); \
        break
#define SEI_TYPE_S(type, prefix_valid, suffix_valid, name) \
    case HEVC_SEI_TYPE_ ## type: \
        SEI_TYPE_CHECK_VALID(name, prefix_valid, suffix_valid); \
        CHECK(FUNC(sei_ ## name)(ctx, rw, &current->payload.name, \
                                 &current->payload_size)); \
        break
#define SEI_TYPE_E(type, prefix_valid, suffix_valid, name) \
    case HEVC_SEI_TYPE_ ## type: \
        SEI_TYPE_CHECK_VALID(name, prefix_valid, suffix_valid); \
        CHECK(FUNC(sei_ ## name)(ctx, rw, &current->payload.name, \
                                 &current->payload_size, \
                                 &more_data)); \
        break

        SEI_TYPE_E(BUFFERING_PERIOD,         1, 0, buffering_period);
        SEI_TYPE_N(PICTURE_TIMING,           1, 0, pic_timing);
        SEI_TYPE_N(PAN_SCAN_RECT,            1, 0, pan_scan_rect);
        SEI_TYPE_S(USER_DATA_REGISTERED_ITU_T_T35,
                                             1, 1, user_data_registered);
        SEI_TYPE_S(USER_DATA_UNREGISTERED,   1, 1, user_data_unregistered);
        SEI_TYPE_N(RECOVERY_POINT,           1, 0, recovery_point);
        SEI_TYPE_N(DISPLAY_ORIENTATION,      1, 0, display_orientation);
        SEI_TYPE_N(ACTIVE_PARAMETER_SETS,    1, 0, active_parameter_sets);
        SEI_TYPE_N(DECODED_PICTURE_HASH,     0, 1, decoded_picture_hash);
        SEI_TYPE_N(TIME_CODE,                1, 0, time_code);
        SEI_TYPE_N(MASTERING_DISPLAY_INFO,   1, 0, mastering_display);
        SEI_TYPE_N(CONTENT_LIGHT_LEVEL_INFO, 1, 0, content_light_level);
        SEI_TYPE_N(ALTERNATIVE_TRANSFER_CHARACTERISTICS,
                                             1, 0, alternative_transfer_characteristics);
        SEI_TYPE_N(ALPHA_CHANNEL_INFO,       1, 0, alpha_channel_info);

#undef SEI_TYPE
    default:
        {
            // Unknown payload type: preserve the raw bytes verbatim.
#ifdef READ
            current->payload.other.data_length = current->payload_size;
#endif
            allocate(current->payload.other.data, current->payload.other.data_length);

            for (i = 0; i < current->payload_size; i++)
                xu(8, payload_byte[i], current->payload.other.data[i], 0, 255,
                   1, i);
        }
    }

    // more_data_in_payload()
#ifdef READ
    current_position = get_bits_count(rw) - start_position;
    if (current_position < 8 * current->payload_size) {
#else
    current_position = put_bits_count(rw) - start_position;
    if (byte_alignment(rw) || more_data) {
#endif
        CHECK(FUNC(payload_extension)(ctx, rw, &current->extension_data,
                                      current->payload_size, current_position));
        fixed(1, bit_equal_to_one, 1);
        while (byte_alignment(rw))
            fixed(1, bit_equal_to_zero, 0);
    }

#ifdef WRITE
    current->payload_size = (put_bits_count(rw) - start_position) >> 3;
#endif

    return 0;
}
// Read/write a complete prefix or suffix SEI NAL unit: the NAL header, a
// sequence of payloads (each with the ff_byte-escaped type/size coding), and
// the RBSP trailer.
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawSEI *current, int prefix)
{
    int err, k;

    if (prefix)
        HEADER("Prefix Supplemental Enhancement Information");
    else
        HEADER("Suffix Supplemental Enhancement Information");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                prefix ? HEVC_NAL_SEI_PREFIX
                                       : HEVC_NAL_SEI_SUFFIX));

#ifdef READ
    for (k = 0; k < H265_MAX_SEI_PAYLOADS; k++) {
        uint32_t payload_type = 0;
        uint32_t payload_size = 0;
        uint32_t tmp;

        // payload_type and payload_size are coded as runs of 0xff bytes
        // (each adding 255) terminated by a final byte in [0, 254].
        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_type += 255;
        }
        xu(8, last_payload_type_byte, tmp, 0, 254, 0);
        payload_type += tmp;

        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_size += 255;
        }
        xu(8, last_payload_size_byte, tmp, 0, 254, 0);
        payload_size += tmp;

        current->payload[k].payload_type = payload_type;
        current->payload[k].payload_size = payload_size;

        current->payload_count++;
        CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k], prefix));

        if (!cbs_h2645_read_more_rbsp_data(rw))
            break;
    }
    if (k >= H265_MAX_SEI_PAYLOADS) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
               "SEI message: found %d.\n", k);
        return AVERROR_INVALIDDATA;
    }
#else
    for (k = 0; k < current->payload_count; k++) {
        PutBitContext start_state;
        uint32_t tmp;
        int need_size, i;

        // Somewhat clumsy: we write the payload twice when
        // we don't know the size in advance.  This will mess
        // with trace output, but is otherwise harmless.
        start_state = *rw;
        need_size = !current->payload[k].payload_size;
        for (i = 0; i < 1 + need_size; i++) {
            // Second pass (if any) rewrites from the saved state with the
            // payload_size filled in by the first pass.
            *rw = start_state;
            tmp = current->payload[k].payload_type;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_type_byte, tmp, 0, 254, 0);

            tmp = current->payload[k].payload_size;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_size_byte, tmp, 0, 254, 0);

            CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k], prefix));
        }
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}