/* H.264 coded bitstream syntax template — part of FFmpeg's CBS framework. */
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
// Read/write rbsp_trailing_bits(): one stop bit, then zero bits until
// the next byte boundary.  `err` is used implicitly by the fixed() macro.
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, rbsp_stop_one_bit, 1);
    while (byte_alignment(rw) != 0)
        fixed(1, rbsp_alignment_zero_bit, 0);

    return 0;
}
// Read/write a NAL unit header.  valid_type_mask is a bitmask of the NAL
// unit types the caller will accept; any other type is rejected.
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H264RawNALUnitHeader *current,
                                 uint32_t valid_type_mask)
{
    int err;

    fixed(1, forbidden_zero_bit, 0);
    ub(2, nal_ref_idc);
    ub(5, nal_unit_type);

    // (1 << type) & mask — '<<' binds tighter than '&'.
    if (!(1 << current->nal_unit_type & valid_type_mask)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n",
               current->nal_unit_type);
        return AVERROR_INVALIDDATA;
    }

    // NAL types 14/20/21 carry an extension header (SVC, MVC or 3D-AVC);
    // none of these layered extensions are supported, so all three paths
    // below bail out after parsing the discriminating flag.
    if (current->nal_unit_type == 14 ||
        current->nal_unit_type == 20 ||
        current->nal_unit_type == 21) {
        if (current->nal_unit_type != 21)
            flag(svc_extension_flag);
        else
            flag(avc_3d_extension_flag);

        if (current->svc_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else if (current->avc_3d_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        }
    }

    return 0;
}
// Read/write one scaling list of up to size_of_scaling_list delta_scale
// values.  The loop stops early once the running scale value wraps to
// zero; the remaining entries are then not present in the bitstream.
static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawScalingList *current,
                              int size_of_scaling_list)
{
    int err, i, scale;

    scale = 8;  // initial scale value defined by the spec
    for (i = 0; i < size_of_scaling_list; i++) {
        ses(delta_scale[i], -128, +127, 1, i);
        // Accumulate modulo 256; +256 keeps the left operand non-negative.
        scale = (scale + current->delta_scale[i] + 256) % 256;
        if (scale == 0)
            break;
    }

    return 0;
}
// Read/write hrd_parameters() (hypothetical reference decoder settings,
// carried inside the VUI).
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawHRD *current)
{
    int err, i;

    ue(cpb_cnt_minus1, 0, 31);
    ub(4, bit_rate_scale);
    ub(4, cpb_size_scale);

    // One bit-rate / CPB-size / CBR-flag triple per coded picture buffer.
    for (i = 0; i <= current->cpb_cnt_minus1; i++) {
        ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        flags(cbr_flag[i], 1, i);
    }

    // Bit lengths (minus one) of the timing fields used by HRD SEI messages.
    ub(5, initial_cpb_removal_delay_length_minus1);
    ub(5, cpb_removal_delay_length_minus1);
    ub(5, dpb_output_delay_length_minus1);
    ub(5, time_offset_length);

    return 0;
}
// Read/write VUI (Video Usability Information) parameters attached to an
// SPS.  Every optional field that is absent from the bitstream is
// infer()red to its default so the struct can be read unconditionally.
static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawVUI *current, H264RawSPS *sps)
{
    int err;

    // Sample aspect ratio; idc 255 ("Extended_SAR") carries an explicit
    // width/height pair.
    flag(aspect_ratio_info_present_flag);
    if (current->aspect_ratio_info_present_flag) {
        ub(8, aspect_ratio_idc);
        if (current->aspect_ratio_idc == 255) {
            ub(16, sar_width);
            ub(16, sar_height);
        }
    } else {
        infer(aspect_ratio_idc, 0);
    }

    flag(overscan_info_present_flag);
    if (current->overscan_info_present_flag)
        flag(overscan_appropriate_flag);

    // Video format / colour description (primaries, transfer, matrix);
    // the value 2 means "unspecified".
    flag(video_signal_type_present_flag);
    if (current->video_signal_type_present_flag) {
        ub(3, video_format);
        flag(video_full_range_flag);
        flag(colour_description_present_flag);
        if (current->colour_description_present_flag) {
            ub(8, colour_primaries);
            ub(8, transfer_characteristics);
            ub(8, matrix_coefficients);
        } else {
            infer(colour_primaries, 2);
            infer(transfer_characteristics, 2);
            infer(matrix_coefficients, 2);
        }
    } else {
        infer(video_format, 5);
        infer(video_full_range_flag, 0);
        infer(colour_primaries, 2);
        infer(transfer_characteristics, 2);
        infer(matrix_coefficients, 2);
    }

    flag(chroma_loc_info_present_flag);
    if (current->chroma_loc_info_present_flag) {
        ue(chroma_sample_loc_type_top_field, 0, 5);
        ue(chroma_sample_loc_type_bottom_field, 0, 5);
    } else {
        infer(chroma_sample_loc_type_top_field, 0);
        infer(chroma_sample_loc_type_bottom_field, 0);
    }

    flag(timing_info_present_flag);
    if (current->timing_info_present_flag) {
        u(32, num_units_in_tick, 1, UINT32_MAX);
        u(32, time_scale, 1, UINT32_MAX);
        flag(fixed_frame_rate_flag);
    } else {
        infer(fixed_frame_rate_flag, 0);
    }

    // HRD parameters may be present for the NAL and/or VCL layers.
    flag(nal_hrd_parameters_present_flag);
    if (current->nal_hrd_parameters_present_flag)
        CHECK(FUNC(hrd_parameters)(ctx, rw, &current->nal_hrd_parameters));
    flag(vcl_hrd_parameters_present_flag);
    if (current->vcl_hrd_parameters_present_flag)
        CHECK(FUNC(hrd_parameters)(ctx, rw, &current->vcl_hrd_parameters));
    if (current->nal_hrd_parameters_present_flag ||
        current->vcl_hrd_parameters_present_flag)
        flag(low_delay_hrd_flag);
    else
        infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag);

    flag(pic_struct_present_flag);

    flag(bitstream_restriction_flag);
    if (current->bitstream_restriction_flag) {
        flag(motion_vectors_over_pic_boundaries_flag);
        ue(max_bytes_per_pic_denom, 0, 16);
        ue(max_bits_per_mb_denom, 0, 16);
        // The current version of the standard constrains this to be in
        // [0,15], but older versions allow 16.
        ue(log2_max_mv_length_horizontal, 0, 16);
        ue(log2_max_mv_length_vertical, 0, 16);
        ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES);
        ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES);
    } else {
        infer(motion_vectors_over_pic_boundaries_flag, 1);
        infer(max_bytes_per_pic_denom, 2);
        infer(max_bits_per_mb_denom, 1);
        infer(log2_max_mv_length_horizontal, 15);
        infer(log2_max_mv_length_vertical, 15);
        // For these profiles with constraint_set3 the DPB defaults are
        // zero reordering; otherwise the maximum DPB size is assumed.
        if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
             sps->profile_idc == 100 || sps->profile_idc == 110 ||
             sps->profile_idc == 122 || sps->profile_idc == 244) &&
            sps->constraint_set3_flag) {
            infer(max_num_reorder_frames, 0);
            infer(max_dec_frame_buffering, 0);
        } else {
            infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
            infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
        }
    }

    return 0;
}
// Populate the VUI struct with the default inferred values used when
// vui_parameters_present_flag is zero in the SPS.  Mirrors the "absent"
// inferences of vui_parameters() so downstream readers never see an
// uninitialised VUI.
static int FUNC(vui_parameters_default)(CodedBitstreamContext *ctx,
                                        RWContext *rw, H264RawVUI *current,
                                        H264RawSPS *sps)
{
    infer(aspect_ratio_idc, 0);

    infer(video_format, 5);
    infer(video_full_range_flag, 0);
    infer(colour_primaries, 2);       // 2 = unspecified
    infer(transfer_characteristics, 2);
    infer(matrix_coefficients, 2);

    infer(chroma_sample_loc_type_top_field, 0);
    infer(chroma_sample_loc_type_bottom_field, 0);

    infer(fixed_frame_rate_flag, 0);
    infer(low_delay_hrd_flag, 1);

    infer(pic_struct_present_flag, 0);

    infer(motion_vectors_over_pic_boundaries_flag, 1);
    infer(max_bytes_per_pic_denom, 2);
    infer(max_bits_per_mb_denom, 1);
    infer(log2_max_mv_length_horizontal, 15);
    infer(log2_max_mv_length_vertical, 15);
    // Same profile/constraint-dependent DPB defaults as vui_parameters().
    if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
         sps->profile_idc == 100 || sps->profile_idc == 110 ||
         sps->profile_idc == 122 || sps->profile_idc == 244) &&
        sps->constraint_set3_flag) {
        infer(max_num_reorder_frames, 0);
        infer(max_dec_frame_buffering, 0);
    } else {
        infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
        infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
    }

    return 0;
}
// Read/write a Sequence Parameter Set NAL unit.
static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSPS *current)
{
    int err, i;

    HEADER("Sequence Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS));

    ub(8, profile_idc);
    flag(constraint_set0_flag);
    flag(constraint_set1_flag);
    flag(constraint_set2_flag);
    flag(constraint_set3_flag);
    flag(constraint_set4_flag);
    flag(constraint_set5_flag);
    u(2, reserved_zero_2bits, 0, 0);
    ub(8, level_idc);

    ue(seq_parameter_set_id, 0, 31);

    // Chroma format, bit depth and scaling matrices are only coded for
    // this list of (High and related) profiles; other profiles use the
    // inferred 4:2:0 / 8-bit defaults below.
    if (current->profile_idc == 100 || current->profile_idc == 110 ||
        current->profile_idc == 122 || current->profile_idc == 244 ||
        current->profile_idc ==  44 || current->profile_idc ==  83 ||
        current->profile_idc ==  86 || current->profile_idc == 118 ||
        current->profile_idc == 128 || current->profile_idc == 138) {
        ue(chroma_format_idc, 0, 3);

        // 4:4:4 may code the colour planes separately.
        if (current->chroma_format_idc == 3)
            flag(separate_colour_plane_flag);
        else
            infer(separate_colour_plane_flag, 0);

        ue(bit_depth_luma_minus8, 0, 6);
        ue(bit_depth_chroma_minus8, 0, 6);

        flag(qpprime_y_zero_transform_bypass_flag);

        flag(seq_scaling_matrix_present_flag);
        if (current->seq_scaling_matrix_present_flag) {
            // 8 lists (six 4x4 + two 8x8), or 12 when chroma is 4:4:4.
            for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) {
                flags(seq_scaling_list_present_flag[i], 1, i);
                if (current->seq_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }
    } else {
        // profile_idc 183 implies monochrome; everything else is 4:2:0.
        infer(chroma_format_idc, current->profile_idc == 183 ? 0 : 1);

        infer(separate_colour_plane_flag, 0);
        infer(bit_depth_luma_minus8, 0);
        infer(bit_depth_chroma_minus8, 0);
    }

    ue(log2_max_frame_num_minus4, 0, 12);

    // Picture order count derivation: type 0 codes the LSB length,
    // type 1 codes explicit offsets, type 2 needs no extra fields.
    ue(pic_order_cnt_type, 0, 2);
    if (current->pic_order_cnt_type == 0) {
        ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
    } else if (current->pic_order_cnt_type == 1) {
        flag(delta_pic_order_always_zero_flag);
        se(offset_for_non_ref_pic, INT32_MIN + 1, INT32_MAX);
        se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX);
        ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255);
        for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++)
            ses(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX, 1, i);
    }

    ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES);
    flag(gaps_in_frame_num_allowed_flag);

    // Picture dimensions in macroblocks / map units.
    ue(pic_width_in_mbs_minus1, 0, H264_MAX_MB_WIDTH);
    ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT);

    flag(frame_mbs_only_flag);
    if (!current->frame_mbs_only_flag)
        flag(mb_adaptive_frame_field_flag);

    flag(direct_8x8_inference_flag);

    flag(frame_cropping_flag);
    if (current->frame_cropping_flag) {
        ue(frame_crop_left_offset, 0, H264_MAX_WIDTH);
        ue(frame_crop_right_offset, 0, H264_MAX_WIDTH);
        ue(frame_crop_top_offset, 0, H264_MAX_HEIGHT);
        ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT);
    }

    // Read explicit VUI or fill in the inferred defaults.
    flag(vui_parameters_present_flag);
    if (current->vui_parameters_present_flag)
        CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));
    else
        CHECK(FUNC(vui_parameters_default)(ctx, rw, &current->vui, current));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a Sequence Parameter Set Extension NAL unit (auxiliary
// coded picture information, e.g. an alpha plane).
static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                               H264RawSPSExtension *current)
{
    int err;

    HEADER("Sequence Parameter Set Extension");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS_EXT));

    ue(seq_parameter_set_id, 0, 31);

    ue(aux_format_idc, 0, 3);
    if (current->aux_format_idc != 0) {
        int bits;

        ue(bit_depth_aux_minus8, 0, 4);
        flag(alpha_incr_flag);

        // Alpha values are coded with bit depth + 1 bits.
        bits = current->bit_depth_aux_minus8 + 9;
        ub(bits, alpha_opaque_value);
        ub(bits, alpha_transparent_value);
    }

    flag(additional_extension_flag);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a Picture Parameter Set NAL unit.  Requires the SPS it
// references to have been seen already (looked up via h264->sps[]).
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawPPS *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i;

    HEADER("Picture Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_PPS));

    ue(pic_parameter_set_id, 0, 255);
    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }

    flag(entropy_coding_mode_flag);
    flag(bottom_field_pic_order_in_frame_present_flag);

    // Slice groups (flexible macroblock ordering).  The map type decides
    // which of the following field sets is coded.
    ue(num_slice_groups_minus1, 0, 7);
    if (current->num_slice_groups_minus1 > 0) {
        unsigned int pic_size;
        int iGroup;

        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);

        ue(slice_group_map_type, 0, 6);

        if (current->slice_group_map_type == 0) {
            for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++)
                ues(run_length_minus1[iGroup], 0, pic_size - 1, 1, iGroup);

        } else if (current->slice_group_map_type == 2) {
            for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) {
                ues(top_left[iGroup], 0, pic_size - 1, 1, iGroup);
                ues(bottom_right[iGroup],
                    current->top_left[iGroup], pic_size - 1, 1, iGroup);
            }
        } else if (current->slice_group_map_type == 3 ||
                   current->slice_group_map_type == 4 ||
                   current->slice_group_map_type == 5) {
            flag(slice_group_change_direction_flag);
            ue(slice_group_change_rate_minus1, 0, pic_size - 1);
        } else if (current->slice_group_map_type == 6) {
            // Explicit per-map-unit group assignment.
            ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1);

            allocate(current->slice_group_id,
                     current->pic_size_in_map_units_minus1 + 1);
            for (i = 0; i <= current->pic_size_in_map_units_minus1; i++)
                us(av_log2(2 * current->num_slice_groups_minus1 + 1),
                   slice_group_id[i], 0, current->num_slice_groups_minus1, 1, i);
        }
    }

    ue(num_ref_idx_l0_default_active_minus1, 0, 31);
    ue(num_ref_idx_l1_default_active_minus1, 0, 31);

    flag(weighted_pred_flag);
    u(2, weighted_bipred_idc, 0, 2);

    // QP init lower bound scales with luma bit depth.
    se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25);
    se(pic_init_qs_minus26, -26, +25);
    se(chroma_qp_index_offset, -12, +12);

    flag(deblocking_filter_control_present_flag);
    flag(constrained_intra_pred_flag);
    flag(redundant_pic_cnt_present_flag);

    // The 8x8/scaling-matrix extension fields are present only if more
    // RBSP data remains; otherwise their values are inferred.
    if (more_rbsp_data(current->more_rbsp_data))
    {
        flag(transform_8x8_mode_flag);

        flag(pic_scaling_matrix_present_flag);
        if (current->pic_scaling_matrix_present_flag) {
            for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) *
                                 current->transform_8x8_mode_flag); i++) {
                flags(pic_scaling_list_present_flag[i], 1, i);
                if (current->pic_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }

        se(second_chroma_qp_index_offset, -12, +12);
    } else {
        infer(transform_8x8_mode_flag, 0);
        infer(pic_scaling_matrix_present_flag, 0);
        infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset);
    }

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a Buffering Period SEI message.  The field lengths depend
// on the HRD parameters in the referenced SPS, which this also makes
// the active SPS.
static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H264RawSEIBufferingPeriod *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i, length;

    HEADER("Buffering Period");

    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    // One (delay, offset) pair per CPB, for NAL and VCL HRD separately.
    // The removal delay must be non-zero; the offset may be zero.
    if (sps->vui.nal_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->nal.initial_cpb_removal_delay[i],
               1, MAX_UINT_BITS(length), 1, i);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->nal.initial_cpb_removal_delay_offset[i],
               0, MAX_UINT_BITS(length), 1, i);
        }
    }

    if (sps->vui.vcl_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay[i],
               1, MAX_UINT_BITS(length), 1, i);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay_offset[i],
               0, MAX_UINT_BITS(length), 1, i);
        }
    }

    return 0;
}
// Read/write one clock timestamp inside a Picture Timing SEI message.
// The time_offset field length comes from the SPS HRD parameters
// (NAL preferred over VCL, default 24 when neither is present).
static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSEIPicTimestamp *current,
                                   const H264RawSPS *sps)
{
    uint8_t time_offset_length;
    int err;

    u(2, ct_type, 0, 2);
    flag(nuit_field_based_flag);
    u(5, counting_type, 0, 6);
    flag(full_timestamp_flag);
    flag(discontinuity_flag);
    flag(cnt_dropped_flag);
    ub(8, n_frames);

    if (current->full_timestamp_flag) {
        // All three time components are present unconditionally.
        u(6, seconds_value, 0, 59);
        u(6, minutes_value, 0, 59);
        u(5, hours_value, 0, 23);
    } else {
        // Each component is optional and gated by its own flag,
        // nested so hours require minutes require seconds.
        flag(seconds_flag);
        if (current->seconds_flag) {
            u(6, seconds_value, 0, 59);
            flag(minutes_flag);
            if (current->minutes_flag) {
                u(6, minutes_value, 0, 59);
                flag(hours_flag);
                if (current->hours_flag)
                    u(5, hours_value, 0, 23);
            }
        }
    }

    if (sps->vui.nal_hrd_parameters_present_flag)
        time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length;
    else if (sps->vui.vcl_hrd_parameters_present_flag)
        time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length;
    else
        time_offset_length = 24;

    if (time_offset_length > 0)
        ib(time_offset_length, time_offset);
    else
        infer(time_offset, 0);

    return 0;
}
// Read/write a Picture Timing SEI message.  Field presence depends on
// the active SPS (HRD parameters and pic_struct_present_flag).
static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawSEIPicTiming *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err;

    HEADER("Picture Timing");

    sps = h264->active_sps;
    if (!sps) {
        // If there is exactly one possible SPS but it is not yet active
        // then just assume that it should be the active one.
        int i, k = -1;
        for (i = 0; i < H264_MAX_SPS_COUNT; i++) {
            if (h264->sps[i]) {
                if (k >= 0) {
                    // More than one candidate: give up.
                    k = -1;
                    break;
                }
                k = i;
            }
        }
        if (k >= 0)
            sps = h264->sps[k];
    }
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "No active SPS for pic_timing.\n");
        return AVERROR_INVALIDDATA;
    }

    if (sps->vui.nal_hrd_parameters_present_flag ||
        sps->vui.vcl_hrd_parameters_present_flag) {
        const H264RawHRD *hrd;

        if (sps->vui.nal_hrd_parameters_present_flag)
            hrd = &sps->vui.nal_hrd_parameters;
        else if (sps->vui.vcl_hrd_parameters_present_flag)
            hrd = &sps->vui.vcl_hrd_parameters;
        else {
            // Unreachable given the outer condition; kept defensively.
            av_log(ctx->log_ctx, AV_LOG_ERROR,
                   "No HRD parameters for pic_timing.\n");
            return AVERROR_INVALIDDATA;
        }

        // Field widths come from the HRD parameters.
        ub(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay);
        ub(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay);
    }

    if (sps->vui.pic_struct_present_flag) {
        // Number of clock timestamps carried by each pic_struct value.
        static const uint8_t num_clock_ts[9] = {
            1, 1, 1, 2, 2, 3, 3, 2, 3
        };
        int i;

        u(4, pic_struct, 0, 8);
        if (current->pic_struct > 8)
            return AVERROR_INVALIDDATA;

        for (i = 0; i < num_clock_ts[current->pic_struct]; i++) {
            flags(clock_timestamp_flag[i], 1, i);
            if (current->clock_timestamp_flag[i])
                CHECK(FUNC(sei_pic_timestamp)(ctx, rw,
                                              &current->timestamp[i], sps));
        }
    }

    return 0;
}
// Read/write a Pan-Scan Rectangle SEI message.
static int FUNC(sei_pan_scan_rect)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSEIPanScanRect *current)
{
    int err, i;

    HEADER("Pan-Scan Rectangle");

    ue(pan_scan_rect_id, 0, UINT32_MAX - 1);
    flag(pan_scan_rect_cancel_flag);

    if (!current->pan_scan_rect_cancel_flag) {
        // Up to three rectangles, each given by four signed offsets.
        ue(pan_scan_cnt_minus1, 0, 2);

        for (i = 0; i <= current->pan_scan_cnt_minus1; i++) {
            ses(pan_scan_rect_left_offset[i], INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_right_offset[i], INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_top_offset[i], INT32_MIN + 1, INT32_MAX, 1, i);
            ses(pan_scan_rect_bottom_offset[i], INT32_MIN + 1, INT32_MAX, 1, i);
        }

        ue(pan_scan_rect_repetition_period, 0, 16384);
    }

    return 0;
}
// Read/write a Recovery Point SEI message.
static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw,
                                    H264RawSEIRecoveryPoint *current)
{
    int err;

    HEADER("Recovery Point");

    ue(recovery_frame_cnt, 0, 65535);
    flag(exact_match_flag);
    flag(broken_link_flag);
    u(2, changing_slice_group_idc, 0, 2);

    return 0;
}
// Read/write a Display Orientation SEI message (flip flags plus a
// 16-bit anticlockwise rotation value).
static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H264RawSEIDisplayOrientation *current)
{
    int err;

    HEADER("Display Orientation");

    flag(display_orientation_cancel_flag);
    if (!current->display_orientation_cancel_flag) {
        flag(hor_flip);
        flag(ver_flip);
        ub(16, anticlockwise_rotation);
        ue(display_orientation_repetition_period, 0, 16384);
        flag(display_orientation_extension_flag);
    }

    return 0;
}
// Read/write one SEI payload, dispatching on payload_type.  Unknown
// types are kept verbatim as raw bytes.  Bit positions are recorded so
// that, when reading, the declared payload_size can be validated and,
// when writing, payload_size can be (re)computed from what was emitted.
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
                             H264RawSEIPayload *current)
{
    int err, i;
    int start_position, end_position;

#ifdef READ
    start_position = get_bits_count(rw);
#else
    start_position = put_bits_count(rw);
#endif

    switch (current->payload_type) {
    case H264_SEI_TYPE_BUFFERING_PERIOD:
        CHECK(FUNC(sei_buffering_period)
              (ctx, rw, &current->payload.buffering_period));
        break;
    case H264_SEI_TYPE_PIC_TIMING:
        CHECK(FUNC(sei_pic_timing)
              (ctx, rw, &current->payload.pic_timing));
        break;
    case H264_SEI_TYPE_PAN_SCAN_RECT:
        CHECK(FUNC(sei_pan_scan_rect)
              (ctx, rw, &current->payload.pan_scan_rect));
        break;
    case H264_SEI_TYPE_FILLER_PAYLOAD:
    {
        // Filler is payload_size bytes of 0xff.
        for (i = 0; i < current->payload_size; i++)
            fixed(8, ff_byte, 0xff);
    }
    break;
    case H264_SEI_TYPE_USER_DATA_REGISTERED:
        CHECK(FUNC_SEI(sei_user_data_registered)
              (ctx, rw, &current->payload.user_data_registered, &current->payload_size));
        break;
    case H264_SEI_TYPE_USER_DATA_UNREGISTERED:
        CHECK(FUNC_SEI(sei_user_data_unregistered)
              (ctx, rw, &current->payload.user_data_unregistered, &current->payload_size));
        break;
    case H264_SEI_TYPE_RECOVERY_POINT:
        CHECK(FUNC(sei_recovery_point)
              (ctx, rw, &current->payload.recovery_point));
        break;
    case H264_SEI_TYPE_DISPLAY_ORIENTATION:
        CHECK(FUNC(sei_display_orientation)
              (ctx, rw, &current->payload.display_orientation));
        break;
    case H264_SEI_TYPE_MASTERING_DISPLAY_COLOUR_VOLUME:
        CHECK(FUNC_SEI(sei_mastering_display_colour_volume)
              (ctx, rw, &current->payload.mastering_display_colour_volume));
        break;
    case H264_SEI_TYPE_ALTERNATIVE_TRANSFER:
        CHECK(FUNC_SEI(sei_alternative_transfer_characteristics)
              (ctx, rw, &current->payload.alternative_transfer_characteristics));
        break;
    default:
    {
        // Unknown payload type: keep the raw bytes.
#ifdef READ
        current->payload.other.data_length = current->payload_size;
#endif
        allocate(current->payload.other.data, current->payload.other.data_length);
        for (i = 0; i < current->payload.other.data_length; i++)
            xu(8, payload_byte[i], current->payload.other.data[i], 0, 255, 1, i);
    }
    }

    // SEI payload bit-alignment: a one bit, then zero bits to the byte
    // boundary (only when not already aligned).
    if (byte_alignment(rw)) {
        fixed(1, bit_equal_to_one, 1);
        while (byte_alignment(rw))
            fixed(1, bit_equal_to_zero, 0);
    }

#ifdef READ
    end_position = get_bits_count(rw);
    if (end_position < start_position + 8 * current->payload_size) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Incorrect SEI payload length: "
               "header %"PRIu32" bits, actually %d bits.\n",
               8 * current->payload_size,
               end_position - start_position);
        return AVERROR_INVALIDDATA;
    }
#else
    end_position = put_bits_count(rw);
    current->payload_size = (end_position - start_position) / 8;
#endif

    return 0;
}
// Read/write a complete SEI NAL unit.  The type and size of each payload
// are coded as runs of 0xff bytes plus a final byte (each 0xff adds 255).
// Reading parses payloads until no more RBSP data remains; writing emits
// payload_count payloads, re-emitting a payload once when its size was
// not known in advance.
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSEI *current)
{
    int err, k;

    HEADER("Supplemental Enhancement Information");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SEI));

#ifdef READ
    for (k = 0; k < H264_MAX_SEI_PAYLOADS; k++) {
        uint32_t payload_type = 0;
        uint32_t payload_size = 0;
        uint32_t tmp;

        // Accumulate the variable-length payload type.
        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_type += 255;
        }
        xu(8, last_payload_type_byte, tmp, 0, 254, 0);
        payload_type += tmp;

        // Accumulate the variable-length payload size (in bytes).
        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_size += 255;
        }
        xu(8, last_payload_size_byte, tmp, 0, 254, 0);
        payload_size += tmp;

        current->payload[k].payload_type = payload_type;
        current->payload[k].payload_size = payload_size;

        current->payload_count++;
        CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));

        if (!cbs_h2645_read_more_rbsp_data(rw))
            break;
    }
    if (k >= H264_MAX_SEI_PAYLOADS) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
               "SEI message: found %d.\n", k);
        return AVERROR_INVALIDDATA;
    }
#else
    for (k = 0; k < current->payload_count; k++) {
        PutBitContext start_state;
        uint32_t tmp;
        int need_size, i;

        // Somewhat clumsy: we write the payload twice when
        // we don't know the size in advance. This will mess
        // with trace output, but is otherwise harmless.
        start_state = *rw;
        need_size = !current->payload[k].payload_size;
        for (i = 0; i < 1 + need_size; i++) {
            *rw = start_state;

            tmp = current->payload[k].payload_type;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_type_byte, tmp, 0, 254, 0);

            // First pass may write size 0; sei_payload() fills in the
            // real size, used on the second pass.
            tmp = current->payload[k].payload_size;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_size_byte, tmp, 0, 254, 0);

            CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
        }
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write an Access Unit Delimiter NAL unit.
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawAUD *current)
{
    int err;

    HEADER("Access Unit Delimiter");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_AUD));

    ub(3, primary_pic_type);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write the reference picture list modification syntax of a slice
// header.  List 0 modifications apply unless the slice is I or SI
// (slice_type % 5 == 2 or 4); list 1 modifications apply to B slices
// only (slice_type % 5 == 1).  Each list is a sequence of operations
// terminated by modification_of_pic_nums_idc == 3.
static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw,
                                           H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i, mopn;

    if (current->slice_type % 5 != 2 &&
        current->slice_type % 5 != 4) {
        flag(ref_pic_list_modification_flag_l0);
        if (current->ref_pic_list_modification_flag_l0) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3, 0);

                mopn = current->rplm_l0[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                // idc 0/1: short-term reference by pic-num difference;
                // idc 2: long-term reference by pic num.
                if (mopn == 0 || mopn == 1)
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l0[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                        (1 << (sps->log2_max_frame_num_minus4 + 4)), 0);
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l0[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
            }
        }
    }

    if (current->slice_type % 5 == 1) {
        flag(ref_pic_list_modification_flag_l1);
        if (current->ref_pic_list_modification_flag_l1) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3, 0);

                mopn = current->rplm_l1[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l1[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                        (1 << (sps->log2_max_frame_num_minus4 + 4)), 0);
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l1[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
            }
        }
    }

    return 0;
}
// Read/write the prediction weight table of a slice header: per-reference
// luma (and, when chroma is coded jointly, chroma) weights and offsets
// for list 0, and additionally for list 1 on B slices
// (slice_type % 5 == 1).
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int chroma;
    int err, i, j;

    ue(luma_log2_weight_denom, 0, 7);

    // Chroma weights exist only when chroma is present and not coded as
    // separate colour planes.
    chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0;
    if (chroma)
        ue(chroma_log2_weight_denom, 0, 7);

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        flags(luma_weight_l0_flag[i], 1, i);
        if (current->luma_weight_l0_flag[i]) {
            ses(luma_weight_l0[i], -128, +127, 1, i);
            ses(luma_offset_l0[i], -128, +127, 1, i);
        }
        if (chroma) {
            flags(chroma_weight_l0_flag[i], 1, i);
            if (current->chroma_weight_l0_flag[i]) {
                for (j = 0; j < 2; j++) {   // j: Cb then Cr
                    ses(chroma_weight_l0[i][j], -128, +127, 2, i, j);
                    ses(chroma_offset_l0[i][j], -128, +127, 2, i, j);
                }
            }
        }
    }

    if (current->slice_type % 5 == 1) {
        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            flags(luma_weight_l1_flag[i], 1, i);
            if (current->luma_weight_l1_flag[i]) {
                ses(luma_weight_l1[i], -128, +127, 1, i);
                ses(luma_offset_l1[i], -128, +127, 1, i);
            }
            if (chroma) {
                flags(chroma_weight_l1_flag[i], 1, i);
                if (current->chroma_weight_l1_flag[i]) {
                    for (j = 0; j < 2; j++) {
                        ses(chroma_weight_l1[i][j], -128, +127, 2, i, j);
                        ses(chroma_offset_l1[i][j], -128, +127, 2, i, j);
                    }
                }
            }
        }
    }

    return 0;
}
/* Parse/write dec_ref_pic_marking() (ITU-T H.264 section 7.3.3.3):
 * signals how reference pictures are marked/unmarked in the DPB. */
static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H264RawSliceHeader *current, int idr_pic_flag)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i;
    uint32_t mmco;

    if (idr_pic_flag) {
        // IDR pictures carry only these two flags; all prior references
        // are implicitly discarded.
        flag(no_output_of_prior_pics_flag);
        flag(long_term_reference_flag);
    } else {
        flag(adaptive_ref_pic_marking_mode_flag);
        if (current->adaptive_ref_pic_marking_mode_flag) {
            // Sequence of memory management control operations,
            // terminated by an mmco value of 0.
            for (i = 0; i < H264_MAX_MMCO_COUNT; i++) {
                xue(memory_management_control_operation,
                    current->mmco[i].memory_management_control_operation,
                    0, 6, 0);
                mmco = current->mmco[i].memory_management_control_operation;
                if (mmco == 0)
                    break;
                // Each operation type carries its own arguments
                // (see H.264 section 7.4.3.3 for their semantics).
                if (mmco == 1 || mmco == 3)
                    xue(difference_of_pic_nums_minus1,
                        current->mmco[i].difference_of_pic_nums_minus1,
                        0, INT32_MAX, 0);
                if (mmco == 2)
                    xue(long_term_pic_num,
                        current->mmco[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
                if (mmco == 3 || mmco == 6)
                    xue(long_term_frame_idx,
                        current->mmco[i].long_term_frame_idx,
                        0, sps->max_num_ref_frames - 1, 0);
                if (mmco == 4)
                    xue(max_long_term_frame_idx_plus1,
                        current->mmco[i].max_long_term_frame_idx_plus1,
                        0, sps->max_num_ref_frames, 0);
            }
            // Reject streams whose MMCO list overruns our fixed array
            // without a terminating mmco == 0.
            if (i == H264_MAX_MMCO_COUNT) {
                av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many "
                       "memory management control operations.\n");
                return AVERROR_INVALIDDATA;
            }
        }
    }

    return 0;
}
/* Parse/write a slice header (ITU-T H.264 section 7.3.3).
 * Resolves the active PPS/SPS from the coded pic_parameter_set_id and
 * stores them in the codec-private context for use by nested syntax
 * functions.  Statement order matches bitstream order exactly. */
static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    const H264RawPPS *pps;
    int err;
    int idr_pic_flag;
    int slice_type_i, slice_type_p, slice_type_b;
    int slice_type_si, slice_type_sp;

    HEADER("Slice Header");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SLICE |
                                1 << H264_NAL_IDR_SLICE |
                                1 << H264_NAL_AUXILIARY_SLICE));

    if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) {
        // Auxiliary slices inherit IDR-ness from the primary slice of the
        // same access unit, which must have been seen first.
        if (!h264->last_slice_nal_unit_type) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice "
                   "is not decodable without the main picture "
                   "in the same access unit.\n");
            return AVERROR_INVALIDDATA;
        }
        idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE;
    } else {
        idr_pic_flag = current->nal_unit_header.nal_unit_type == H264_NAL_IDR_SLICE;
    }

    ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1);
    ue(slice_type, 0, 9);

    // slice_type in 5..9 means "all slices in this picture share this
    // type"; modulo 5 gives the base type in either range.
    slice_type_i  = current->slice_type % 5 == 2;
    slice_type_p  = current->slice_type % 5 == 0;
    slice_type_b  = current->slice_type % 5 == 1;
    slice_type_si = current->slice_type % 5 == 4;
    slice_type_sp = current->slice_type % 5 == 3;

    // IDR pictures may only contain I or SI slices.
    if (idr_pic_flag && !(slice_type_i || slice_type_si)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d "
               "for IDR picture.\n", current->slice_type);
        return AVERROR_INVALIDDATA;
    }

    // Activate the referenced PPS, then the SPS it points at.
    ue(pic_parameter_set_id, 0, 255);
    pps = h264->pps[current->pic_parameter_set_id];
    if (!pps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
               current->pic_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_pps = pps;

    sps = h264->sps[pps->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               pps->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    if (sps->separate_colour_plane_flag)
        u(2, colour_plane_id, 0, 2);

    ub(sps->log2_max_frame_num_minus4 + 4, frame_num);

    // Field/frame coding flags; absent fields are inferred as per spec.
    if (!sps->frame_mbs_only_flag) {
        flag(field_pic_flag);
        if (current->field_pic_flag)
            flag(bottom_field_flag);
        else
            infer(bottom_field_flag, 0);
    } else {
        infer(field_pic_flag, 0);
        infer(bottom_field_flag, 0);
    }

    if (idr_pic_flag)
        ue(idr_pic_id, 0, 65535);

    // Picture order count information, dependent on the SPS POC type.
    if (sps->pic_order_cnt_type == 0) {
        ub(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb);
        if (pps->bottom_field_pic_order_in_frame_present_flag &&
            !current->field_pic_flag)
            se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX);
    } else if (sps->pic_order_cnt_type == 1) {
        if (!sps->delta_pic_order_always_zero_flag) {
            se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX);
            if (pps->bottom_field_pic_order_in_frame_present_flag &&
                !current->field_pic_flag)
                se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX);
            else
                infer(delta_pic_order_cnt[1], 0);
        } else {
            infer(delta_pic_order_cnt[0], 0);
            infer(delta_pic_order_cnt[1], 0);
        }
    }

    if (pps->redundant_pic_cnt_present_flag)
        ue(redundant_pic_cnt, 0, 127);
    else
        infer(redundant_pic_cnt, 0);

    // Remember the type of the last primary (non-redundant, non-auxiliary)
    // slice, so a following auxiliary slice can inherit IDR-ness.
    if (current->nal_unit_header.nal_unit_type != H264_NAL_AUXILIARY_SLICE
        && !current->redundant_pic_cnt)
        h264->last_slice_nal_unit_type =
            current->nal_unit_header.nal_unit_type;

    if (slice_type_b)
        flag(direct_spatial_mv_pred_flag);

    // Active reference counts: explicit override or the PPS defaults.
    if (slice_type_p || slice_type_sp || slice_type_b) {
        flag(num_ref_idx_active_override_flag);
        if (current->num_ref_idx_active_override_flag) {
            ue(num_ref_idx_l0_active_minus1, 0, 31);
            if (slice_type_b)
                ue(num_ref_idx_l1_active_minus1, 0, 31);
        } else {
            infer(num_ref_idx_l0_active_minus1,
                  pps->num_ref_idx_l0_default_active_minus1);
            infer(num_ref_idx_l1_active_minus1,
                  pps->num_ref_idx_l1_default_active_minus1);
        }
    }

    // MVC (type 20) and 3D-AVC (type 21) use a different modification
    // syntax which is not implemented here.
    if (current->nal_unit_header.nal_unit_type == 20 ||
        current->nal_unit_header.nal_unit_type == 21) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n");
        return AVERROR_PATCHWELCOME;
    } else {
        CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current));
    }

    if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) ||
        (pps->weighted_bipred_idc == 1 && slice_type_b)) {
        CHECK(FUNC(pred_weight_table)(ctx, rw, current));
    }

    // Reference pictures only carry marking commands (nal_ref_idc != 0).
    if (current->nal_unit_header.nal_ref_idc != 0) {
        CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag));
    }

    if (pps->entropy_coding_mode_flag &&
        !slice_type_i && !slice_type_si) {
        ue(cabac_init_idc, 0, 2);
    }

    // QP delta range scales with luma bit depth.
    se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8,
                       + 51 + 6 * sps->bit_depth_luma_minus8);

    if (slice_type_sp || slice_type_si) {
        if (slice_type_sp)
            flag(sp_for_switch_flag);
        se(slice_qs_delta, -51, +51);
    }

    // Deblocking filter parameters; inferred as zero when absent or when
    // the filter is disabled for this slice.
    if (pps->deblocking_filter_control_present_flag) {
        ue(disable_deblocking_filter_idc, 0, 2);
        if (current->disable_deblocking_filter_idc != 1) {
            se(slice_alpha_c0_offset_div2, -6, +6);
            se(slice_beta_offset_div2,     -6, +6);
        } else {
            infer(slice_alpha_c0_offset_div2, 0);
            infer(slice_beta_offset_div2,     0);
        }
    } else {
        infer(disable_deblocking_filter_idc, 0);
        infer(slice_alpha_c0_offset_div2,    0);
        infer(slice_beta_offset_div2,        0);
    }

    // slice_group_change_cycle is coded with just enough bits for its
    // picture-size-dependent maximum value (FMO map types 3..5).
    if (pps->num_slice_groups_minus1 > 0 &&
        pps->slice_group_map_type >= 3 &&
        pps->slice_group_map_type <= 5) {
        unsigned int pic_size, max, bits;

        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);
        max = (pic_size + pps->slice_group_change_rate_minus1) /
              (pps->slice_group_change_rate_minus1 + 1);
        bits = av_ceil_log2(max + 1);

        u(bits, slice_group_change_cycle, 0, max);
    }

    // CABAC slice data starts byte-aligned, padded with one-bits.
    if (pps->entropy_coding_mode_flag) {
        while (byte_alignment(rw))
            fixed(1, cabac_alignment_one_bit, 1);
    }

    return 0;
}
/* Parse/write a filler data NAL unit (ITU-T H.264 section 7.3.2.7):
 * an arbitrary-length run of 0xff bytes used for rate padding. */
static int FUNC(filler)(CodedBitstreamContext *ctx, RWContext *rw,
                        H264RawFiller *current)
{
    int err;

    HEADER("Filler Data");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_FILLER_DATA));

#ifdef READ
    // Reading: consume 0xff bytes and record how many, so the unit can
    // later be rewritten with the same size.
    while (show_bits(rw, 8) == 0xff) {
        fixed(8, ff_byte, 0xff);
        ++current->filler_size;
    }
#else
    // Writing: emit exactly filler_size 0xff bytes.
    {
        uint32_t i;
        for (i = 0; i < current->filler_size; i++)
            fixed(8, ff_byte, 0xff);
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
  1094. static int FUNC(end_of_sequence)(CodedBitstreamContext *ctx, RWContext *rw,
  1095. H264RawNALUnitHeader *current)
  1096. {
  1097. HEADER("End of Sequence");
  1098. return FUNC(nal_unit_header)(ctx, rw, current,
  1099. 1 << H264_NAL_END_SEQUENCE);
  1100. }
  1101. static int FUNC(end_of_stream)(CodedBitstreamContext *ctx, RWContext *rw,
  1102. H264RawNALUnitHeader *current)
  1103. {
  1104. HEADER("End of Stream");
  1105. return FUNC(nal_unit_header)(ctx, rw, current,
  1106. 1 << H264_NAL_END_STREAM);
  1107. }