You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1389 lines
46KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
/* rbsp_trailing_bits(): a stop bit ('1') followed by zero bits up to the
 * next byte boundary, terminating every RBSP structure.
 * NOTE: err is used implicitly by the fixed() macro's error path. */
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, rbsp_stop_one_bit, 1);
    // Pad with zero bits until the stream is byte-aligned.
    while (byte_alignment(rw) != 0)
        fixed(1, rbsp_alignment_zero_bit, 0);

    return 0;
}
/* NAL unit header: forbidden_zero_bit, nal_ref_idc and nal_unit_type.
 * valid_type_mask is a bitmask of the nal_unit_type values acceptable in
 * the calling context; any other type is rejected as invalid data.
 * NAL types 14/20/21 (prefix / slice extension / depth extension) carry an
 * extra extension flag, but the SVC/MVC/3DAVC extension payloads themselves
 * are not supported, so all three paths return AVERROR_PATCHWELCOME. */
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H264RawNALUnitHeader *current,
                                 uint32_t valid_type_mask)
{
    int err;

    u(1, forbidden_zero_bit, 0, 0);
    u(2, nal_ref_idc,        0, 3);
    u(5, nal_unit_type,      0, 31);

    // << binds tighter than &, so this tests (1 << type) & mask.
    if (!(1 << current->nal_unit_type & valid_type_mask)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n",
               current->nal_unit_type);
        return AVERROR_INVALIDDATA;
    }

    if (current->nal_unit_type == 14 ||
        current->nal_unit_type == 20 ||
        current->nal_unit_type == 21) {
        // Type 21 signals avc_3d_extension_flag; 14 and 20 signal
        // svc_extension_flag (per the H.264 NAL header extension syntax).
        if (current->nal_unit_type != 21)
            flag(svc_extension_flag);
        else
            flag(avc_3d_extension_flag);

        if (current->svc_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else if (current->avc_3d_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else {
            // Neither flag set on type 14/20/21 implies plain MVC extension.
            av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        }
    }

    return 0;
}
/* scaling_list(): sequence of delta_scale values building a quantization
 * scaling list of size_of_scaling_list entries (16 for 4x4, 64 for 8x8).
 * Each next scale is the previous one plus delta_scale, modulo 256; a
 * resulting scale of zero terminates the list early (remaining entries
 * then follow the spec's default/repeat rule, handled by the consumer). */
static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawScalingList *current,
                              int size_of_scaling_list)
{
    int err, i, scale;

    scale = 8;
    for (i = 0; i < size_of_scaling_list; i++) {
        ses(delta_scale[i], -128, +127, 1, i);
        // +256 keeps the left operand of % non-negative for any delta.
        scale = (scale + current->delta_scale[i] + 256) % 256;
        if (scale == 0)
            break;
    }

    return 0;
}
/* hrd_parameters(): Hypothetical Reference Decoder parameters (Annex E).
 * Carries per-CPB (cpb_cnt_minus1 + 1 entries) bitrate/buffer sizes and
 * the bit lengths used later by buffering-period / pic-timing SEI fields. */
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawHRD *current)
{
    int err, i;

    ue(cpb_cnt_minus1, 0, 31);
    u(4, bit_rate_scale, 0, 15);
    u(4, cpb_size_scale, 0, 15);

    for (i = 0; i <= current->cpb_cnt_minus1; i++) {
        ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        flags(cbr_flag[i], 1, i);
    }

    // Field widths (minus 1) for delay values in related SEI messages.
    u(5, initial_cpb_removal_delay_length_minus1, 0, 31);
    u(5, cpb_removal_delay_length_minus1, 0, 31);
    u(5, dpb_output_delay_length_minus1,  0, 31);
    u(5, time_offset_length, 0, 31);

    return 0;
}
/* vui_parameters(): Video Usability Information (Annex E.1.1) attached to
 * an SPS — aspect ratio, colour description, timing, HRD and bitstream
 * restrictions. Every optional field that is absent from the bitstream is
 * inferred to its spec-defined default so downstream code can read the
 * struct unconditionally. */
static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawVUI *current, H264RawSPS *sps)
{
    int err;

    flag(aspect_ratio_info_present_flag);
    if (current->aspect_ratio_info_present_flag) {
        u(8, aspect_ratio_idc, 0, 255);
        // 255 == Extended_SAR: explicit sample aspect ratio follows.
        if (current->aspect_ratio_idc == 255) {
            u(16, sar_width, 0, 65535);
            u(16, sar_height, 0, 65535);
        }
    } else {
        infer(aspect_ratio_idc, 0);
    }

    flag(overscan_info_present_flag);
    if (current->overscan_info_present_flag)
        flag(overscan_appropriate_flag);

    flag(video_signal_type_present_flag);
    if (current->video_signal_type_present_flag) {
        u(3, video_format, 0, 7);
        flag(video_full_range_flag);
        flag(colour_description_present_flag);
        if (current->colour_description_present_flag) {
            u(8, colour_primaries, 0, 255);
            u(8, transfer_characteristics, 0, 255);
            u(8, matrix_coefficients, 0, 255);
        }
    } else {
        // Defaults: video_format 5 = "unspecified", colour fields 2 =
        // "unspecified" per the VUI semantics.
        infer(video_format, 5);
        infer(video_full_range_flag, 0);
        infer(colour_primaries, 2);
        infer(transfer_characteristics, 2);
        infer(matrix_coefficients, 2);
    }

    flag(chroma_loc_info_present_flag);
    if (current->chroma_loc_info_present_flag) {
        ue(chroma_sample_loc_type_top_field, 0, 5);
        ue(chroma_sample_loc_type_bottom_field, 0, 5);
    } else {
        infer(chroma_sample_loc_type_top_field, 0);
        infer(chroma_sample_loc_type_bottom_field, 0);
    }

    flag(timing_info_present_flag);
    if (current->timing_info_present_flag) {
        u(32, num_units_in_tick, 1, UINT32_MAX);
        u(32, time_scale, 1, UINT32_MAX);
        flag(fixed_frame_rate_flag);
    } else {
        infer(fixed_frame_rate_flag, 0);
    }

    flag(nal_hrd_parameters_present_flag);
    if (current->nal_hrd_parameters_present_flag)
        CHECK(FUNC(hrd_parameters)(ctx, rw, &current->nal_hrd_parameters));
    flag(vcl_hrd_parameters_present_flag);
    if (current->vcl_hrd_parameters_present_flag)
        CHECK(FUNC(hrd_parameters)(ctx, rw, &current->vcl_hrd_parameters));

    if (current->nal_hrd_parameters_present_flag ||
        current->vcl_hrd_parameters_present_flag)
        flag(low_delay_hrd_flag);
    else
        infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag);

    flag(pic_struct_present_flag);

    flag(bitstream_restriction_flag);
    if (current->bitstream_restriction_flag) {
        flag(motion_vectors_over_pic_boundaries_flag);
        ue(max_bytes_per_pic_denom, 0, 16);
        ue(max_bits_per_mb_denom, 0, 16);
        // The current version of the standard constrains this to be in
        // [0,15], but older versions allow 16.
        ue(log2_max_mv_length_horizontal, 0, 16);
        ue(log2_max_mv_length_vertical, 0, 16);
        ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES);
        ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES);
    } else {
        infer(motion_vectors_over_pic_boundaries_flag, 1);
        infer(max_bytes_per_pic_denom, 2);
        infer(max_bits_per_mb_denom, 1);
        infer(log2_max_mv_length_horizontal, 15);
        infer(log2_max_mv_length_vertical, 15);
        // High-family profiles with constraint_set3 imply zero reordering
        // (still-picture-like constraints); otherwise use the maximum DPB.
        if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
             sps->profile_idc == 100 || sps->profile_idc == 110 ||
             sps->profile_idc == 122 || sps->profile_idc == 244) &&
            sps->constraint_set3_flag) {
            infer(max_num_reorder_frames, 0);
            infer(max_dec_frame_buffering, 0);
        } else {
            infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
            infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
        }
    }

    return 0;
}
/* vui_parameters_default(): fill the VUI struct with spec-default values
 * when vui_parameters_present_flag is 0. Must mirror the inferred defaults
 * of vui_parameters() above. Only infer() is used, so no 'err' local is
 * needed here. */
static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx,
                                        RWContext *rw, H264RawVUI *current,
                                        H264RawSPS *sps)
{
    infer(aspect_ratio_idc, 0);

    infer(video_format, 5);
    infer(video_full_range_flag, 0);
    infer(colour_primaries, 2);
    infer(transfer_characteristics, 2);
    infer(matrix_coefficients, 2);

    infer(chroma_sample_loc_type_top_field, 0);
    infer(chroma_sample_loc_type_bottom_field, 0);

    infer(fixed_frame_rate_flag, 0);
    infer(low_delay_hrd_flag, 1);

    infer(pic_struct_present_flag, 0);

    infer(motion_vectors_over_pic_boundaries_flag, 1);
    infer(max_bytes_per_pic_denom, 2);
    infer(max_bits_per_mb_denom, 1);
    infer(log2_max_mv_length_horizontal, 15);
    infer(log2_max_mv_length_vertical, 15);
    // Same profile/constraint_set3 special case as in vui_parameters().
    if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
         sps->profile_idc == 100 || sps->profile_idc == 110 ||
         sps->profile_idc == 122 || sps->profile_idc == 244) &&
        sps->constraint_set3_flag) {
        infer(max_num_reorder_frames, 0);
        infer(max_dec_frame_buffering, 0);
    } else {
        infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
        infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
    }

    return 0;
}
/* sps(): Sequence Parameter Set (H.264 section 7.3.2.1.1). Parses/writes
 * profile/level, chroma and bit-depth configuration (for the high-profile
 * family), scaling matrices, POC parameters, frame geometry, cropping and
 * the optional VUI. */
static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSPS *current)
{
    int err, i;

    HEADER("Sequence Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS));

    u(8, profile_idc, 0, 255);

    flag(constraint_set0_flag);
    flag(constraint_set1_flag);
    flag(constraint_set2_flag);
    flag(constraint_set3_flag);
    flag(constraint_set4_flag);
    flag(constraint_set5_flag);
    u(2, reserved_zero_2bits, 0, 0);

    u(8, level_idc, 0, 255);

    ue(seq_parameter_set_id, 0, 31);

    // Profiles of the High family carry explicit chroma format, bit depth
    // and optional scaling matrices; all others use fixed defaults.
    if (current->profile_idc == 100 || current->profile_idc == 110 ||
        current->profile_idc == 122 || current->profile_idc == 244 ||
        current->profile_idc ==  44 || current->profile_idc ==  83 ||
        current->profile_idc ==  86 || current->profile_idc == 118 ||
        current->profile_idc == 128 || current->profile_idc == 138) {
        ue(chroma_format_idc, 0, 3);

        // 3 == 4:4:4, which may code the colour planes separately.
        if (current->chroma_format_idc == 3)
            flag(separate_colour_plane_flag);
        else
            infer(separate_colour_plane_flag, 0);

        ue(bit_depth_luma_minus8, 0, 6);
        ue(bit_depth_chroma_minus8, 0, 6);

        flag(qpprime_y_zero_transform_bypass_flag);

        flag(seq_scaling_matrix_present_flag);
        if (current->seq_scaling_matrix_present_flag) {
            // 8 lists for 4:2:0/4:2:2 (six 4x4 + two 8x8), 12 for 4:4:4.
            for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) {
                flags(seq_scaling_list_present_flag[i], 1, i);
                if (current->seq_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }
    } else {
        // profile_idc 183 is monochrome (chroma_format_idc 0); everything
        // else defaults to 4:2:0 with 8-bit samples.
        infer(chroma_format_idc, current->profile_idc == 183 ? 0 : 1);

        infer(separate_colour_plane_flag, 0);
        infer(bit_depth_luma_minus8, 0);
        infer(bit_depth_chroma_minus8, 0);
    }

    ue(log2_max_frame_num_minus4, 0, 12);
    ue(pic_order_cnt_type, 0, 2);

    if (current->pic_order_cnt_type == 0) {
        ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
    } else if (current->pic_order_cnt_type == 1) {
        flag(delta_pic_order_always_zero_flag);
        se(offset_for_non_ref_pic, INT32_MIN + 1, INT32_MAX);
        se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX);
        ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255);

        for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++)
            ses(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX, 1, i);
    }

    ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES);
    flag(gaps_in_frame_num_allowed_flag);

    ue(pic_width_in_mbs_minus1,        0, H264_MAX_MB_WIDTH);
    ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT);

    flag(frame_mbs_only_flag);
    if (!current->frame_mbs_only_flag)
        flag(mb_adaptive_frame_field_flag);

    flag(direct_8x8_inference_flag);

    flag(frame_cropping_flag);
    if (current->frame_cropping_flag) {
        ue(frame_crop_left_offset,   0, H264_MAX_WIDTH);
        ue(frame_crop_right_offset,  0, H264_MAX_WIDTH);
        ue(frame_crop_top_offset,    0, H264_MAX_HEIGHT);
        ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT);
    }

    flag(vui_parameters_present_flag);
    if (current->vui_parameters_present_flag)
        CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));
    else
        CHECK(FUNC(vui_parameters_default)(ctx, rw, &current->vui, current));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/* sps_extension(): Sequence Parameter Set Extension (auxiliary coded
 * picture info, e.g. an alpha plane). When aux_format_idc is nonzero the
 * alpha opaque/transparent sample values are coded with
 * bit_depth_aux_minus8 + 9 bits each. */
static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                               H264RawSPSExtension *current)
{
    int err;

    HEADER("Sequence Parameter Set Extension");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS_EXT));

    ue(seq_parameter_set_id, 0, 31);

    ue(aux_format_idc, 0, 3);
    if (current->aux_format_idc != 0) {
        int bits;

        ue(bit_depth_aux_minus8, 0, 4);
        flag(alpha_incr_flag);

        bits = current->bit_depth_aux_minus8 + 9;
        u(bits, alpha_opaque_value,      0, MAX_UINT_BITS(bits));
        u(bits, alpha_transparent_value, 0, MAX_UINT_BITS(bits));
    }

    flag(additional_extension_flag);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/* pps(): Picture Parameter Set (H.264 section 7.3.2.2). Requires the
 * referenced SPS to have been seen already (looked up via
 * seq_parameter_set_id) because slice-group sizes and QP ranges depend on
 * SPS fields. The trailing transform/scaling fields are only present when
 * more RBSP data remains (High-profile extension). */
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawPPS *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i;

    HEADER("Picture Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_PPS));

    ue(pic_parameter_set_id, 0, 255);
    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }

    flag(entropy_coding_mode_flag);
    flag(bottom_field_pic_order_in_frame_present_flag);

    ue(num_slice_groups_minus1, 0, 7);
    if (current->num_slice_groups_minus1 > 0) {
        unsigned int pic_size;
        int iGroup;

        // Picture size in map units, bounding the slice-group fields.
        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);

        ue(slice_group_map_type, 0, 6);

        if (current->slice_group_map_type == 0) {
            // Interleaved slice groups: run length per group.
            for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++)
                ues(run_length_minus1[iGroup], 0, pic_size - 1, 1, iGroup);
        } else if (current->slice_group_map_type == 2) {
            // Foreground/background: rectangle corners per group.
            for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) {
                ues(top_left[iGroup], 0, pic_size - 1, 1, iGroup);
                ues(bottom_right[iGroup],
                    current->top_left[iGroup], pic_size - 1, 1, iGroup);
            }
        } else if (current->slice_group_map_type == 3 ||
                   current->slice_group_map_type == 4 ||
                   current->slice_group_map_type == 5) {
            // Changing (box-out / raster / wipe) slice groups.
            flag(slice_group_change_direction_flag);
            ue(slice_group_change_rate_minus1, 0, pic_size - 1);
        } else if (current->slice_group_map_type == 6) {
            // Explicit map: one group id per map unit.
            ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1);

            allocate(current->slice_group_id,
                     current->pic_size_in_map_units_minus1 + 1);
            for (i = 0; i <= current->pic_size_in_map_units_minus1; i++)
                us(av_log2(2 * current->num_slice_groups_minus1 + 1),
                   slice_group_id[i], 0, current->num_slice_groups_minus1, 1, i);
        }
    }

    ue(num_ref_idx_l0_default_active_minus1, 0, 31);
    ue(num_ref_idx_l1_default_active_minus1, 0, 31);

    flag(weighted_pred_flag);
    u(2, weighted_bipred_idc, 0, 2);

    // QP range widens with luma bit depth (−6 per extra bit).
    se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25);
    se(pic_init_qs_minus26, -26, +25);
    se(chroma_qp_index_offset, -12, +12);

    flag(deblocking_filter_control_present_flag);
    flag(constrained_intra_pred_flag);
    flag(redundant_pic_cnt_present_flag);

    if (more_rbsp_data(current->more_rbsp_data))
    {
        flag(transform_8x8_mode_flag);

        flag(pic_scaling_matrix_present_flag);
        if (current->pic_scaling_matrix_present_flag) {
            for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) *
                                 current->transform_8x8_mode_flag); i++) {
                flags(pic_scaling_list_present_flag[i], 1, i);
                if (current->pic_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }

        se(second_chroma_qp_index_offset, -12, +12);
    } else {
        infer(transform_8x8_mode_flag, 0);
        infer(pic_scaling_matrix_present_flag, 0);
        infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset);
    }

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/* sei_buffering_period(): buffering period SEI message. Activates the SPS
 * it references (sets h264->active_sps) and reads the initial CPB removal
 * delays for each scheduler index, using field widths taken from the SPS's
 * NAL/VCL HRD parameters. */
static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H264RawSEIBufferingPeriod *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i, length;

    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    // This SEI activates the SPS for subsequent messages (e.g. pic_timing).
    h264->active_sps = sps;

    if (sps->vui.nal_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->nal.initial_cpb_removal_delay[i],
               1, MAX_UINT_BITS(length), 1, i);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->nal.initial_cpb_removal_delay_offset[i],
               0, MAX_UINT_BITS(length), 1, i);
        }
    }

    if (sps->vui.vcl_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay[i],
               1, MAX_UINT_BITS(length), 1, i);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay_offset[i],
               0, MAX_UINT_BITS(length), 1, i);
        }
    }

    return 0;
}
/* sei_pic_timestamp(): clock_timestamp() structure inside pic_timing SEI.
 * Either a full hh:mm:ss timestamp, or a nested set of optional fields.
 * The width of time_offset comes from the active HRD parameters (NAL
 * preferred over VCL), defaulting to 24 bits when neither is present. */
static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSEIPicTimestamp *current,
                                   const H264RawSPS *sps)
{
    uint8_t time_offset_length;
    int err;

    u(2, ct_type, 0, 2);
    flag(nuit_field_based_flag);
    u(5, counting_type, 0, 6);
    flag(full_timestamp_flag);
    flag(discontinuity_flag);
    flag(cnt_dropped_flag);
    u(8, n_frames, 0, 255);

    if (current->full_timestamp_flag) {
        u(6, seconds_value, 0, 59);
        u(6, minutes_value, 0, 59);
        u(5, hours_value,   0, 23);
    } else {
        // Each component is optional and only present if the enclosing
        // one was coded: seconds -> minutes -> hours.
        flag(seconds_flag);
        if (current->seconds_flag) {
            u(6, seconds_value, 0, 59);
            flag(minutes_flag);
            if (current->minutes_flag) {
                u(6, minutes_value, 0, 59);
                flag(hours_flag);
                if (current->hours_flag)
                    u(5, hours_value, 0, 23);
            }
        }
    }

    if (sps->vui.nal_hrd_parameters_present_flag)
        time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length;
    else if (sps->vui.vcl_hrd_parameters_present_flag)
        time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length;
    else
        time_offset_length = 24;

    if (time_offset_length > 0)
        u(time_offset_length, time_offset,
          0, MAX_UINT_BITS(time_offset_length));
    else
        infer(time_offset, 0);

    return 0;
}
/* sei_pic_timing(): picture timing SEI. Needs an active SPS for the HRD
 * field widths and pic_struct flag; if none has been activated yet but
 * exactly one SPS exists, that one is assumed to be active. */
static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawSEIPicTiming *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err;

    sps = h264->active_sps;
    if (!sps) {
        // If there is exactly one possible SPS but it is not yet active
        // then just assume that it should be the active one.
        int i, k = -1;
        for (i = 0; i < H264_MAX_SPS_COUNT; i++) {
            if (h264->sps[i]) {
                if (k >= 0) {
                    // More than one candidate: give up the guess.
                    k = -1;
                    break;
                }
                k = i;
            }
        }
        if (k >= 0)
            sps = h264->sps[k];
    }
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "No active SPS for pic_timing.\n");
        return AVERROR_INVALIDDATA;
    }

    if (sps->vui.nal_hrd_parameters_present_flag ||
        sps->vui.vcl_hrd_parameters_present_flag) {
        const H264RawHRD *hrd;

        if (sps->vui.nal_hrd_parameters_present_flag)
            hrd = &sps->vui.nal_hrd_parameters;
        else if (sps->vui.vcl_hrd_parameters_present_flag)
            hrd = &sps->vui.vcl_hrd_parameters;
        else {
            // Unreachable given the enclosing condition; kept defensively.
            av_log(ctx->log_ctx, AV_LOG_ERROR,
                   "No HRD parameters for pic_timing.\n");
            return AVERROR_INVALIDDATA;
        }

        u(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay,
          0, MAX_UINT_BITS(hrd->cpb_removal_delay_length_minus1 + 1));
        u(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay,
          0, MAX_UINT_BITS(hrd->dpb_output_delay_length_minus1 + 1));
    }

    if (sps->vui.pic_struct_present_flag) {
        // Number of clock_timestamp() entries per pic_struct value (0..8).
        static const int num_clock_ts[9] = {
            1, 1, 1, 2, 2, 3, 3, 2, 3
        };
        int i;

        u(4, pic_struct, 0, 8);
        if (current->pic_struct > 8)
            return AVERROR_INVALIDDATA;

        for (i = 0; i < num_clock_ts[current->pic_struct]; i++) {
            flags(clock_timestamp_flag[i], 1, i);

            if (current->clock_timestamp_flag[i])
                CHECK(FUNC(sei_pic_timestamp)(ctx, rw,
                                              &current->timestamp[i], sps));
        }
    }

    return 0;
}
/* sei_pan_scan_rect(): pan-scan rectangle SEI — up to three display
 * rectangles (left/right/top/bottom offsets), unless cancelled. */
static int FUNC(sei_pan_scan_rect)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSEIPanScanRect *current)
{
    int err, i;

    ue(pan_scan_rect_id, 0, UINT32_MAX - 1);
    flag(pan_scan_rect_cancel_flag);

    if (!current->pan_scan_rect_cancel_flag) {
        ue(pan_scan_cnt_minus1, 0, 2);

        for (i = 0; i <= current->pan_scan_cnt_minus1; i++) {
            ses(pan_scan_rect_left_offset[i],   INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_right_offset[i],  INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_top_offset[i],    INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i);
        }

        ue(pan_scan_rect_repetition_period, 0, 16384);
    }

    return 0;
}
/* sei_user_data_registered(): ITU-T T.35 registered user data SEI.
 * The header is 1 byte (country code) or 2 bytes (0xff + extension byte);
 * when reading, the remaining payload bytes become current->data, and when
 * writing, *payload_size is recomputed from data_length. */
static int FUNC(sei_user_data_registered)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H264RawSEIUserDataRegistered *current,
                                          uint32_t *payload_size)
{
    int err, i, j;

    u(8, itu_t_t35_country_code, 0x00, 0xff);
    if (current->itu_t_t35_country_code != 0xff)
        i = 1;
    else {
        // 0xff escapes to a second country-code byte.
        u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff);
        i = 2;
    }

#ifdef READ
    if (*payload_size < i) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data registered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - i;
#else
    *payload_size = i + current->data_length;
#endif

    allocate(current->data, current->data_length);
    for (j = 0; j < current->data_length; j++)
        xu(8, itu_t_t35_payload_byte[i], current->data[j], 0x00, 0xff, 1, i + j);

    return 0;
}
/* sei_user_data_unregistered(): user data SEI identified by a 16-byte
 * UUID, followed by arbitrary payload bytes. Reading derives data_length
 * from *payload_size; writing recomputes *payload_size. */
static int FUNC(sei_user_data_unregistered)(CodedBitstreamContext *ctx, RWContext *rw,
                                            H264RawSEIUserDataUnregistered *current,
                                            uint32_t *payload_size)
{
    int err, i;

#ifdef READ
    if (*payload_size < 16) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data unregistered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - 16;
#else
    *payload_size = 16 + current->data_length;
#endif

    for (i = 0; i < 16; i++)
        us(8, uuid_iso_iec_11578[i], 0x00, 0xff, 1, i);

    allocate(current->data, current->data_length);

    for (i = 0; i < current->data_length; i++)
        xu(8, user_data_payload_byte[i], current->data[i], 0x00, 0xff, 1, i);

    return 0;
}
/* sei_recovery_point(): recovery point SEI — tells a decoder how many
 * frames after this point output becomes correct. */
static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw,
                                    H264RawSEIRecoveryPoint *current)
{
    int err;

    ue(recovery_frame_cnt, 0, 65535);
    flag(exact_match_flag);
    flag(broken_link_flag);
    u(2, changing_slice_group_idc, 0, 2);

    return 0;
}
/* sei_display_orientation(): display orientation SEI — flip flags and an
 * anticlockwise rotation (in 1/65536-turn units per the SEI semantics),
 * unless cancelled. */
static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H264RawSEIDisplayOrientation *current)
{
    int err;

    flag(display_orientation_cancel_flag);
    if (!current->display_orientation_cancel_flag) {
        flag(hor_flip);
        flag(ver_flip);
        u(16, anticlockwise_rotation, 0, 65535);
        ue(display_orientation_repetition_period, 0, 16384);
        flag(display_orientation_extension_flag);
    }

    return 0;
}
/* sei_mastering_display_colour_volume(): HDR mastering display metadata —
 * CIE xy chromaticity for three primaries and the white point (coded in
 * 0.00002 units, hence the 50000 bound) plus min/max luminance. */
static int FUNC(sei_mastering_display_colour_volume)(CodedBitstreamContext *ctx, RWContext *rw,
                                                     H264RawSEIMasteringDisplayColourVolume *current)
{
    int err, c;

    for (c = 0; c < 3; c++) {
        us(16, display_primaries_x[c], 0, 50000, 1, c);
        us(16, display_primaries_y[c], 0, 50000, 1, c);
    }

    u(16, white_point_x, 0, 50000);
    u(16, white_point_y, 0, 50000);

    u(32, max_display_mastering_luminance, 1, MAX_UINT_BITS(32));
    // Minimum must be strictly below the maximum read just above.
    u(32, min_display_mastering_luminance, 0, current->max_display_mastering_luminance - 1);

    return 0;
}
/* sei_payload(): dispatch a single SEI payload to its type-specific
 * handler; unknown types are kept verbatim as raw bytes. After the payload
 * the stream is aligned with bit_equal_to_one / bit_equal_to_zero. While
 * reading, the consumed size is validated against the declared
 * payload_size; while writing, payload_size is recomputed from the actual
 * bit positions. */
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
                             H264RawSEIPayload *current)
{
    int err, i;
    int start_position, end_position;

#ifdef READ
    start_position = get_bits_count(rw);
#else
    start_position = put_bits_count(rw);
#endif

    switch (current->payload_type) {
    case H264_SEI_TYPE_BUFFERING_PERIOD:
        CHECK(FUNC(sei_buffering_period)
              (ctx, rw, &current->payload.buffering_period));
        break;
    case H264_SEI_TYPE_PIC_TIMING:
        CHECK(FUNC(sei_pic_timing)
              (ctx, rw, &current->payload.pic_timing));
        break;
    case H264_SEI_TYPE_PAN_SCAN_RECT:
        CHECK(FUNC(sei_pan_scan_rect)
              (ctx, rw, &current->payload.pan_scan_rect));
        break;
    case H264_SEI_TYPE_FILLER_PAYLOAD:
        {
            // Filler is just payload_size bytes of 0xff.
            for (i = 0; i < current->payload_size; i++)
                fixed(8, ff_byte, 0xff);
        }
        break;
    case H264_SEI_TYPE_USER_DATA_REGISTERED:
        CHECK(FUNC(sei_user_data_registered)
              (ctx, rw, &current->payload.user_data_registered, &current->payload_size));
        break;
    case H264_SEI_TYPE_USER_DATA_UNREGISTERED:
        CHECK(FUNC(sei_user_data_unregistered)
              (ctx, rw, &current->payload.user_data_unregistered, &current->payload_size));
        break;
    case H264_SEI_TYPE_RECOVERY_POINT:
        CHECK(FUNC(sei_recovery_point)
              (ctx, rw, &current->payload.recovery_point));
        break;
    case H264_SEI_TYPE_DISPLAY_ORIENTATION:
        CHECK(FUNC(sei_display_orientation)
              (ctx, rw, &current->payload.display_orientation));
        break;
    case H264_SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME:
        CHECK(FUNC(sei_mastering_display_colour_volume)
              (ctx, rw, &current->payload.mastering_display_colour_volume));
        break;
    default:
        {
            // Unhandled type: preserve the raw payload bytes unchanged.
#ifdef READ
            current->payload.other.data_length = current->payload_size;
#endif
            allocate(current->payload.other.data, current->payload.other.data_length);
            for (i = 0; i < current->payload.other.data_length; i++)
                xu(8, payload_byte[i], current->payload.other.data[i], 0, 255, 1, i);
        }
    }

    if (byte_alignment(rw)) {
        fixed(1, bit_equal_to_one, 1);
        while (byte_alignment(rw))
            fixed(1, bit_equal_to_zero, 0);
    }

#ifdef READ
    end_position = get_bits_count(rw);
    if (end_position < start_position + 8 * current->payload_size) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Incorrect SEI payload length: "
               "header %"PRIu32" bits, actually %d bits.\n",
               8 * current->payload_size,
               end_position - start_position);
        return AVERROR_INVALIDDATA;
    }
#else
    end_position = put_bits_count(rw);
    current->payload_size = (end_position - start_position) / 8;
#endif

    return 0;
}
/* sei(): SEI NAL unit containing up to H264_MAX_SEI_PAYLOADS payloads.
 * Read side: payload type and size are each coded as a run of 0xff bytes
 * (255 each) plus a final byte < 255, then the payload itself; iteration
 * stops when no more RBSP data remains. Write side: the payload may be
 * emitted twice — first to discover its size when payload_size is 0, then
 * again with the correct size bytes (harmless apart from trace output). */
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSEI *current)
{
    int err, k;

    HEADER("Supplemental Enhancement Information");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SEI));

#ifdef READ
    for (k = 0; k < H264_MAX_SEI_PAYLOADS; k++) {
        uint32_t payload_type = 0;
        uint32_t payload_size = 0;
        uint32_t tmp;

        // Accumulate payload type: 255 per leading 0xff byte + final byte.
        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_type += 255;
        }
        xu(8, last_payload_type_byte, tmp, 0, 254, 0);
        payload_type += tmp;

        // Same variable-length coding for the payload size.
        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_size += 255;
        }
        xu(8, last_payload_size_byte, tmp, 0, 254, 0);
        payload_size += tmp;

        current->payload[k].payload_type = payload_type;
        current->payload[k].payload_size = payload_size;

        CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));

        if (!cbs_h2645_read_more_rbsp_data(rw))
            break;
    }
    if (k >= H264_MAX_SEI_PAYLOADS) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
               "SEI message: found %d.\n", k);
        return AVERROR_INVALIDDATA;
    }
    current->payload_count = k + 1;
#else
    for (k = 0; k < current->payload_count; k++) {
        PutBitContext start_state;
        uint32_t tmp;
        int need_size, i;

        // Somewhat clumsy: we write the payload twice when
        // we don't know the size in advance. This will mess
        // with trace output, but is otherwise harmless.
        start_state = *rw;
        need_size = !current->payload[k].payload_size;
        for (i = 0; i < 1 + need_size; i++) {
            // Rewind and re-emit from the saved writer state.
            *rw = start_state;

            tmp = current->payload[k].payload_type;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_type_byte, tmp, 0, 254, 0);

            tmp = current->payload[k].payload_size;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_size_byte, tmp, 0, 254, 0);

            CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
        }
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/* aud(): Access Unit Delimiter NAL — just the primary_pic_type field
 * constraining the slice types that may appear in the access unit. */
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawAUD *current)
{
    int err;

    HEADER("Access Unit Delimiter");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_AUD));

    u(3, primary_pic_type, 0, 7);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
/* ref_pic_list_modification(): per-slice reference picture list
 * reordering. List 0 is modified for any non-I/SI slice (slice_type % 5
 * not in {2, 4}); list 1 only for B slices (slice_type % 5 == 1). Each
 * list is a sequence of modification ops terminated by
 * modification_of_pic_nums_idc == 3; ops 0/1 carry abs_diff_pic_num_minus1
 * and op 2 carries long_term_pic_num, bounded by the active SPS. */
static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw,
                                           H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i, mopn;

    if (current->slice_type % 5 != 2 &&
        current->slice_type % 5 != 4) {
        flag(ref_pic_list_modification_flag_l0);
        if (current->ref_pic_list_modification_flag_l0) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3, 0);

                mopn = current->rplm_l0[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    // Bound doubles for field pictures (field_pic_flag).
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l0[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                        (1 << (sps->log2_max_frame_num_minus4 + 4)), 0);
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l0[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
            }
        }
    }

    if (current->slice_type % 5 == 1) {
        flag(ref_pic_list_modification_flag_l1);
        if (current->ref_pic_list_modification_flag_l1) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3, 0);

                mopn = current->rplm_l1[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l1[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                        (1 << (sps->log2_max_frame_num_minus4 + 4)), 0);
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l1[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
            }
        }
    }

    return 0;
}
// Prediction weight table (weighted prediction parameters), embedded in
// the slice header.  Emits per-reference luma and (when present) chroma
// weight/offset pairs for list 0, and additionally for list 1 on B
// slices.  Requires h264->active_sps to be set by the caller.
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int chroma;
    int err, i, j;

    ue(luma_log2_weight_denom, 0, 7);

    // Chroma weights are present only when chroma planes exist and are
    // not coded as separate colour planes.
    chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0;
    if (chroma)
        ue(chroma_log2_weight_denom, 0, 7);

    // List 0 weights: one optional luma pair and two optional chroma
    // pairs (Cb/Cr) per active reference index.
    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        flags(luma_weight_l0_flag[i], 1, i);
        if (current->luma_weight_l0_flag[i]) {
            ses(luma_weight_l0[i], -128, +127, 1, i);
            ses(luma_offset_l0[i], -128, +127, 1, i);
        }
        if (chroma) {
            flags(chroma_weight_l0_flag[i], 1, i);
            if (current->chroma_weight_l0_flag[i]) {
                for (j = 0; j < 2; j++) {
                    ses(chroma_weight_l0[i][j], -128, +127, 2, i, j);
                    ses(chroma_offset_l0[i][j], -128, +127, 2, i, j);
                }
            }
        }
    }

    // List 1 weights, B slices only (slice_type % 5 == 1); mirrors the
    // list 0 layout above.
    if (current->slice_type % 5 == 1) {
        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            flags(luma_weight_l1_flag[i], 1, i);
            if (current->luma_weight_l1_flag[i]) {
                ses(luma_weight_l1[i], -128, +127, 1, i);
                ses(luma_offset_l1[i], -128, +127, 1, i);
            }
            if (chroma) {
                flags(chroma_weight_l1_flag[i], 1, i);
                if (current->chroma_weight_l1_flag[i]) {
                    for (j = 0; j < 2; j++) {
                        ses(chroma_weight_l1[i][j], -128, +127, 2, i, j);
                        ses(chroma_offset_l1[i][j], -128, +127, 2, i, j);
                    }
                }
            }
        }
    }

    return 0;
}
// Decoded reference picture marking syntax, embedded in the slice
// header of reference pictures.  IDR pictures carry two flags; other
// pictures may carry a list of memory-management control operations
// (MMCOs) terminated by operation 0, bounded by H264_MAX_MMCO_COUNT.
// Requires h264->active_sps to be set by the caller.
static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H264RawSliceHeader *current, int idr_pic_flag)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i;
    uint32_t mmco;

    if (idr_pic_flag) {
        flag(no_output_of_prior_pics_flag);
        flag(long_term_reference_flag);
    } else {
        flag(adaptive_ref_pic_marking_mode_flag);
        if (current->adaptive_ref_pic_marking_mode_flag) {
            for (i = 0; i < H264_MAX_MMCO_COUNT; i++) {
                xue(memory_management_control_operation,
                    current->mmco[i].memory_management_control_operation,
                    0, 6, 0);

                mmco = current->mmco[i].memory_management_control_operation;
                if (mmco == 0)
                    // Operation 0 terminates the MMCO list.
                    break;

                // Each operation carries zero or more arguments; note
                // operation 3 takes both a picture-number difference and
                // a long-term frame index.
                if (mmco == 1 || mmco == 3)
                    xue(difference_of_pic_nums_minus1,
                        current->mmco[i].difference_of_pic_nums_minus1,
                        0, INT32_MAX, 0);
                if (mmco == 2)
                    xue(long_term_pic_num,
                        current->mmco[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
                if (mmco == 3 || mmco == 6)
                    xue(long_term_frame_idx,
                        current->mmco[i].long_term_frame_idx,
                        0, sps->max_num_ref_frames - 1, 0);
                if (mmco == 4)
                    xue(max_long_term_frame_idx_plus1,
                        current->mmco[i].max_long_term_frame_idx_plus1,
                        0, sps->max_num_ref_frames, 0);
            }
            // The loop only exits without break when no terminating
            // operation 0 was seen within the storage limit.
            if (i == H264_MAX_MMCO_COUNT) {
                av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many "
                       "memory management control operations.\n");
                return AVERROR_INVALIDDATA;
            }
        }
    }

    return 0;
}
// Slice header syntax.  Resolves the active PPS/SPS from the parsed
// pic_parameter_set_id (storing them in the codec context for use by
// the sub-structures), then reads/writes all slice header fields in
// bitstream order.  Element order here is load-bearing: the macros
// consume/emit bits sequentially, so statements must not be reordered.
static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    const H264RawPPS *pps;
    int err;
    int idr_pic_flag;
    int slice_type_i, slice_type_p, slice_type_b;
    int slice_type_si, slice_type_sp;

    HEADER("Slice Header");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SLICE |
                                1 << H264_NAL_IDR_SLICE |
                                1 << H264_NAL_AUXILIARY_SLICE));

    if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) {
        // Auxiliary slices inherit properties (e.g. IDR-ness) from the
        // preceding primary slice, so one must have been seen first.
        if (!h264->last_slice_nal_unit_type) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice "
                   "is not decodable without the main picture "
                   "in the same access unit.\n");
            return AVERROR_INVALIDDATA;
        }
    } else {
        h264->last_slice_nal_unit_type =
            current->nal_unit_header.nal_unit_type;
    }
    idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE;

    ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1);
    ue(slice_type, 0, 9);

    // slice_type modulo 5: 0 = P, 1 = B, 2 = I, 3 = SP, 4 = SI
    // (values 5-9 additionally assert all slices in the picture share
    // the same type).
    slice_type_i  = current->slice_type % 5 == 2;
    slice_type_p  = current->slice_type % 5 == 0;
    slice_type_b  = current->slice_type % 5 == 1;
    slice_type_si = current->slice_type % 5 == 4;
    slice_type_sp = current->slice_type % 5 == 3;

    if (idr_pic_flag && !(slice_type_i || slice_type_si)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d "
               "for IDR picture.\n", current->slice_type);
        return AVERROR_INVALIDDATA;
    }

    ue(pic_parameter_set_id, 0, 255);

    pps = h264->pps[current->pic_parameter_set_id];
    if (!pps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
               current->pic_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_pps = pps;

    sps = h264->sps[pps->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               pps->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    if (sps->separate_colour_plane_flag)
        u(2, colour_plane_id, 0, 2);

    // frame_num width is SPS-dependent.
    u(sps->log2_max_frame_num_minus4 + 4, frame_num,
      0, MAX_UINT_BITS(sps->log2_max_frame_num_minus4 + 4));

    if (!sps->frame_mbs_only_flag) {
        flag(field_pic_flag);
        if (current->field_pic_flag)
            flag(bottom_field_flag);
        else
            infer(bottom_field_flag, 0);
    } else {
        infer(field_pic_flag, 0);
        infer(bottom_field_flag, 0);
    }

    if (idr_pic_flag)
        ue(idr_pic_id, 0, 65535);

    // Picture order count fields depend on the SPS POC type.
    if (sps->pic_order_cnt_type == 0) {
        u(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb,
          0, MAX_UINT_BITS(sps->log2_max_pic_order_cnt_lsb_minus4 + 4));
        if (pps->bottom_field_pic_order_in_frame_present_flag &&
            !current->field_pic_flag)
            se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX);

    } else if (sps->pic_order_cnt_type == 1) {
        if (!sps->delta_pic_order_always_zero_flag) {
            se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX);
            if (pps->bottom_field_pic_order_in_frame_present_flag &&
                !current->field_pic_flag)
                se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX);
            else
                infer(delta_pic_order_cnt[1], 0);
        } else {
            infer(delta_pic_order_cnt[0], 0);
            infer(delta_pic_order_cnt[1], 0);
        }
    }

    if (pps->redundant_pic_cnt_present_flag)
        ue(redundant_pic_cnt, 0, 127);

    if (slice_type_b)
        flag(direct_spatial_mv_pred_flag);

    // Active reference counts: explicit override or PPS defaults.
    if (slice_type_p || slice_type_sp || slice_type_b) {
        flag(num_ref_idx_active_override_flag);
        if (current->num_ref_idx_active_override_flag) {
            ue(num_ref_idx_l0_active_minus1, 0, 31);
            if (slice_type_b)
                ue(num_ref_idx_l1_active_minus1, 0, 31);
        } else {
            infer(num_ref_idx_l0_active_minus1,
                  pps->num_ref_idx_l0_default_active_minus1);
            infer(num_ref_idx_l1_active_minus1,
                  pps->num_ref_idx_l1_default_active_minus1);
        }
    }

    // NAL types 20/21 would use ref_pic_list_mvc_modification instead,
    // which is not implemented.
    if (current->nal_unit_header.nal_unit_type == 20 ||
        current->nal_unit_header.nal_unit_type == 21) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n");
        return AVERROR_PATCHWELCOME;
    } else {
        CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current));
    }

    if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) ||
        (pps->weighted_bipred_idc == 1 && slice_type_b)) {
        CHECK(FUNC(pred_weight_table)(ctx, rw, current));
    }

    // Marking syntax is only present in reference pictures.
    if (current->nal_unit_header.nal_ref_idc != 0) {
        CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag));
    }

    if (pps->entropy_coding_mode_flag &&
        !slice_type_i && !slice_type_si) {
        ue(cabac_init_idc, 0, 2);
    }

    // QP delta range scales with luma bit depth.
    se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8,
                       + 51 + 6 * sps->bit_depth_luma_minus8);

    if (slice_type_sp || slice_type_si) {
        if (slice_type_sp)
            flag(sp_for_switch_flag);
        se(slice_qs_delta, -51, +51);
    }

    if (pps->deblocking_filter_control_present_flag) {
        ue(disable_deblocking_filter_idc, 0, 2);
        if (current->disable_deblocking_filter_idc != 1) {
            se(slice_alpha_c0_offset_div2, -6, +6);
            se(slice_beta_offset_div2,     -6, +6);
        } else {
            infer(slice_alpha_c0_offset_div2, 0);
            infer(slice_beta_offset_div2,     0);
        }
    } else {
        infer(disable_deblocking_filter_idc, 0);
        infer(slice_alpha_c0_offset_div2,    0);
        infer(slice_beta_offset_div2,        0);
    }

    // Present only for slice group map types 3-5; the field width is
    // derived from the picture size and the change rate.
    if (pps->num_slice_groups_minus1 > 0 &&
        pps->slice_group_map_type >= 3 &&
        pps->slice_group_map_type <= 5) {
        unsigned int pic_size, max, bits;

        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);
        max = (pic_size + pps->slice_group_change_rate_minus1) /
              (pps->slice_group_change_rate_minus1 + 1);
        bits = av_log2(2 * max - 1);

        u(bits, slice_group_change_cycle, 0, max);
    }

    // CABAC slice data starts byte-aligned, padded with one-bits.
    if (pps->entropy_coding_mode_flag) {
        while (byte_alignment(rw))
            fixed(1, cabac_alignment_one_bit, 1);
    }

    return 0;
}
// Filler data NAL unit: a run of 0xff bytes.  When reading, the run
// length is counted into filler_size; when writing, filler_size bytes
// are emitted.  (Note: reading accumulates onto the existing
// filler_size value rather than resetting it — current is assumed
// zero-initialised by the caller.)
static int FUNC(filler)(CodedBitstreamContext *ctx, RWContext *rw,
                        H264RawFiller *current)
{
    int err;

    HEADER("Filler Data");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_FILLER_DATA));

#ifdef READ
    // Consume 0xff bytes until the first non-0xff byte, which is left
    // for rbsp_trailing_bits below.
    while (show_bits(rw, 8) == 0xff) {
        fixed(8, ff_byte, 0xff);
        ++current->filler_size;
    }
#else
    {
        uint32_t i;
        for (i = 0; i < current->filler_size; i++)
            fixed(8, ff_byte, 0xff);
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// End-of-sequence NAL unit: consists solely of its NAL unit header.
static int FUNC(end_of_sequence)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H264RawNALUnitHeader *current)
{
    HEADER("End of Sequence");
    return FUNC(nal_unit_header)(ctx, rw, current,
                                 1 << H264_NAL_END_SEQUENCE);
}
// End-of-stream NAL unit: consists solely of its NAL unit header.
static int FUNC(end_of_stream)(CodedBitstreamContext *ctx, RWContext *rw,
                               H264RawNALUnitHeader *current)
{
    HEADER("End of Stream");
    return FUNC(nal_unit_header)(ctx, rw, current,
                                 1 << H264_NAL_END_STREAM);
}