You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1274 lines
42KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
// Read/write rbsp_trailing_bits() (H.264 section 7.3.2.11): a single stop
// bit equal to one, then zero bits until the next byte boundary.
// NOTE: the fixed() macro uses the local "err" and returns on failure.
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, rbsp_stop_one_bit, 1);
    // Pad with zero bits up to byte alignment.
    while (byte_alignment(rw) != 0)
        fixed(1, rbsp_alignment_zero_bit, 0);

    return 0;
}
// Read/write a NAL unit header (H.264 section 7.3.1).  valid_type_mask is a
// bitmask of nal_unit_type values acceptable in the current context; any
// other type is rejected as invalid data.
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H264RawNALUnitHeader *current,
                                 uint32_t valid_type_mask)
{
    int err;

    u(1, forbidden_zero_bit, 0, 0);
    u(2, nal_ref_idc,        0, 3);
    u(5, nal_unit_type,      0, 31);

    // << binds tighter than &, so this tests (1 << type) & mask.
    if (!(1 << current->nal_unit_type & valid_type_mask)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n",
               current->nal_unit_type);
        return AVERROR_INVALIDDATA;
    }

    // Types 14 (prefix NAL), 20 (slice extension) and 21 (slice extension
    // for depth views) carry an extended header selecting SVC / MVC / 3D-AVC,
    // none of which are supported here.
    if (current->nal_unit_type == 14 ||
        current->nal_unit_type == 20 ||
        current->nal_unit_type == 21) {
        if (current->nal_unit_type != 21)
            flag(svc_extension_flag);
        else
            flag(avc_3d_extension_flag);

        if (current->svc_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else if (current->avc_3d_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        }
    }

    return 0;
}
// Read/write a scaling list (H.264 section 7.3.2.1.1.1).  Delta values are
// accumulated modulo 256; a running scale of zero means "use the default
// list for the remaining entries", so parsing stops early.
static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawScalingList *current,
                              int size_of_scaling_list)
{
    int err, i, scale;

    scale = 8;
    for (i = 0; i < size_of_scaling_list; i++) {
        ses(delta_scale[i], -128, +127, 1, i);
        // +256 keeps the left operand of % non-negative.
        scale = (scale + current->delta_scale[i] + 256) % 256;
        if (scale == 0)
            break;
    }

    return 0;
}
// Read/write HRD (hypothetical reference decoder) parameters
// (H.264 section E.1.2): per-CPB bitrate/size/CBR info followed by the
// various delay-field bit lengths used by SEI messages.
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawHRD *current)
{
    int err, i;

    ue(cpb_cnt_minus1, 0, 31);
    u(4, bit_rate_scale, 0, 15);
    u(4, cpb_size_scale, 0, 15);

    for (i = 0; i <= current->cpb_cnt_minus1; i++) {
        ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        flags(cbr_flag[i], 1, i);
    }

    u(5, initial_cpb_removal_delay_length_minus1, 0, 31);
    u(5, cpb_removal_delay_length_minus1, 0, 31);
    u(5, dpb_output_delay_length_minus1, 0, 31);
    u(5, time_offset_length, 0, 31);

    return 0;
}
  91. static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
  92. H264RawVUI *current, H264RawSPS *sps)
  93. {
  94. int err;
  95. flag(aspect_ratio_info_present_flag);
  96. if (current->aspect_ratio_info_present_flag) {
  97. u(8, aspect_ratio_idc, 0, 255);
  98. if (current->aspect_ratio_idc == 255) {
  99. u(16, sar_width, 0, 65535);
  100. u(16, sar_height, 0, 65535);
  101. }
  102. } else {
  103. infer(aspect_ratio_idc, 0);
  104. }
  105. flag(overscan_info_present_flag);
  106. if (current->overscan_info_present_flag)
  107. flag(overscan_appropriate_flag);
  108. flag(video_signal_type_present_flag);
  109. if (current->video_signal_type_present_flag) {
  110. u(3, video_format, 0, 7);
  111. flag(video_full_range_flag);
  112. flag(colour_description_present_flag);
  113. if (current->colour_description_present_flag) {
  114. u(8, colour_primaries, 0, 255);
  115. u(8, transfer_characteristics, 0, 255);
  116. u(8, matrix_coefficients, 0, 255);
  117. }
  118. } else {
  119. infer(video_format, 5);
  120. infer(video_full_range_flag, 0);
  121. infer(colour_primaries, 2);
  122. infer(transfer_characteristics, 2);
  123. infer(matrix_coefficients, 2);
  124. }
  125. flag(chroma_loc_info_present_flag);
  126. if (current->chroma_loc_info_present_flag) {
  127. ue(chroma_sample_loc_type_top_field, 0, 5);
  128. ue(chroma_sample_loc_type_bottom_field, 0, 5);
  129. } else {
  130. infer(chroma_sample_loc_type_top_field, 0);
  131. infer(chroma_sample_loc_type_bottom_field, 0);
  132. }
  133. flag(timing_info_present_flag);
  134. if (current->timing_info_present_flag) {
  135. u(32, num_units_in_tick, 1, UINT32_MAX);
  136. u(32, time_scale, 1, UINT32_MAX);
  137. flag(fixed_frame_rate_flag);
  138. } else {
  139. infer(fixed_frame_rate_flag, 0);
  140. }
  141. flag(nal_hrd_parameters_present_flag);
  142. if (current->nal_hrd_parameters_present_flag)
  143. CHECK(FUNC(hrd_parameters)(ctx, rw, &current->nal_hrd_parameters));
  144. flag(vcl_hrd_parameters_present_flag);
  145. if (current->vcl_hrd_parameters_present_flag)
  146. CHECK(FUNC(hrd_parameters)(ctx, rw, &current->vcl_hrd_parameters));
  147. if (current->nal_hrd_parameters_present_flag ||
  148. current->vcl_hrd_parameters_present_flag)
  149. flag(low_delay_hrd_flag);
  150. else
  151. infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag);
  152. flag(pic_struct_present_flag);
  153. flag(bitstream_restriction_flag);
  154. if (current->bitstream_restriction_flag) {
  155. flag(motion_vectors_over_pic_boundaries_flag);
  156. ue(max_bytes_per_pic_denom, 0, 16);
  157. ue(max_bits_per_mb_denom, 0, 16);
  158. ue(log2_max_mv_length_horizontal, 0, 16);
  159. ue(log2_max_mv_length_vertical, 0, 16);
  160. ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES);
  161. ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES);
  162. } else {
  163. infer(motion_vectors_over_pic_boundaries_flag, 1);
  164. infer(max_bytes_per_pic_denom, 2);
  165. infer(max_bits_per_mb_denom, 1);
  166. infer(log2_max_mv_length_horizontal, 16);
  167. infer(log2_max_mv_length_vertical, 16);
  168. if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
  169. sps->profile_idc == 110 || sps->profile_idc == 110 ||
  170. sps->profile_idc == 122 || sps->profile_idc == 244) &&
  171. sps->constraint_set3_flag) {
  172. infer(max_num_reorder_frames, 0);
  173. infer(max_dec_frame_buffering, 0);
  174. } else {
  175. infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
  176. infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
  177. }
  178. }
  179. return 0;
  180. }
// Read/write a sequence parameter set (H.264 section 7.3.2.1.1).
static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSPS *current)
{
    int err, i;

    HEADER("Sequence Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS));

    u(8, profile_idc, 0, 255);
    flag(constraint_set0_flag);
    flag(constraint_set1_flag);
    flag(constraint_set2_flag);
    flag(constraint_set3_flag);
    flag(constraint_set4_flag);
    flag(constraint_set5_flag);
    u(2, reserved_zero_2bits, 0, 0);

    u(8, level_idc, 0, 255);

    ue(seq_parameter_set_id, 0, 31);

    // The High profiles carry extra chroma / bit-depth / scaling-matrix
    // syntax; for all other profiles those fields are inferred below.
    if (current->profile_idc == 100 || current->profile_idc == 110 ||
        current->profile_idc == 122 || current->profile_idc == 244 ||
        current->profile_idc ==  44 || current->profile_idc ==  83 ||
        current->profile_idc ==  86 || current->profile_idc == 118 ||
        current->profile_idc == 128 || current->profile_idc == 138) {
        ue(chroma_format_idc, 0, 3);

        if (current->chroma_format_idc == 3)
            flag(separate_colour_plane_flag);
        else
            infer(separate_colour_plane_flag, 0);

        ue(bit_depth_luma_minus8,   0, 6);
        ue(bit_depth_chroma_minus8, 0, 6);

        flag(qpprime_y_zero_transform_bypass_flag);

        flag(seq_scaling_matrix_present_flag);
        if (current->seq_scaling_matrix_present_flag) {
            // 8 lists for 4:2:0/4:2:2 (six 4x4 + two 8x8), 12 for 4:4:4.
            for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) {
                flags(seq_scaling_list_present_flag[i], 1, i);
                if (current->seq_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }
    } else {
        // profile_idc 183 implies monochrome; otherwise 4:2:0.
        infer(chroma_format_idc, current->profile_idc == 183 ? 0 : 1);

        infer(separate_colour_plane_flag, 0);
        infer(bit_depth_luma_minus8,   0);
        infer(bit_depth_chroma_minus8, 0);
    }

    ue(log2_max_frame_num_minus4, 0, 12);
    ue(pic_order_cnt_type, 0, 2);

    if (current->pic_order_cnt_type == 0) {
        ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
    } else if (current->pic_order_cnt_type == 1) {
        flag(delta_pic_order_always_zero_flag);
        // INT32_MIN + 1 lower bound: the se(v) range must stay negatable.
        se(offset_for_non_ref_pic,         INT32_MIN + 1, INT32_MAX);
        se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX);
        ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255);

        for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++)
            ses(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX, 1, i);
    }

    ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES);
    flag(gaps_in_frame_num_allowed_flag);

    ue(pic_width_in_mbs_minus1,        0, H264_MAX_MB_WIDTH);
    ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT);

    flag(frame_mbs_only_flag);
    if (!current->frame_mbs_only_flag)
        flag(mb_adaptive_frame_field_flag);

    flag(direct_8x8_inference_flag);

    flag(frame_cropping_flag);
    if (current->frame_cropping_flag) {
        ue(frame_crop_left_offset,   0, H264_MAX_WIDTH);
        ue(frame_crop_right_offset,  0, H264_MAX_WIDTH);
        ue(frame_crop_top_offset,    0, H264_MAX_HEIGHT);
        ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT);
    }

    flag(vui_parameters_present_flag);
    if (current->vui_parameters_present_flag)
        CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a sequence parameter set extension (H.264 section 7.3.2.1.2):
// auxiliary (alpha) picture format information.
static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                               H264RawSPSExtension *current)
{
    int err;

    HEADER("Sequence Parameter Set Extension");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS_EXT));

    ue(seq_parameter_set_id, 0, 31);

    ue(aux_format_idc, 0, 3);
    if (current->aux_format_idc != 0) {
        int bits;

        ue(bit_depth_aux_minus8, 0, 4);
        flag(alpha_incr_flag);

        // Alpha sample values are (bit depth + 1) bits wide.
        bits = current->bit_depth_aux_minus8 + 9;
        u(bits, alpha_opaque_value,      0, MAX_UINT_BITS(bits));
        u(bits, alpha_transparent_value, 0, MAX_UINT_BITS(bits));
    }

    flag(additional_extension_flag);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a picture parameter set (H.264 section 7.3.2.2).  Requires the
// referenced SPS to have been seen already (it is looked up in the codec
// private context by seq_parameter_set_id).
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawPPS *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i;

    HEADER("Picture Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_PPS));

    ue(pic_parameter_set_id, 0, 255);
    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }

    flag(entropy_coding_mode_flag);
    flag(bottom_field_pic_order_in_frame_present_flag);

    // FMO (flexible macroblock ordering) slice-group maps.
    ue(num_slice_groups_minus1, 0, 7);
    if (current->num_slice_groups_minus1 > 0) {
        unsigned int pic_size;
        int iGroup;

        // Picture size in map units, used to bound the map-unit fields.
        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);

        ue(slice_group_map_type, 0, 6);

        if (current->slice_group_map_type == 0) {
            // Interleaved: one run length per group.
            for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++)
                ues(run_length_minus1[iGroup], 0, pic_size - 1, 1, iGroup);

        } else if (current->slice_group_map_type == 2) {
            // Foreground + leftover: rectangles for all but the last group.
            for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) {
                ues(top_left[iGroup], 0, pic_size - 1, 1, iGroup);
                // bottom_right must not precede top_left.
                ues(bottom_right[iGroup],
                    current->top_left[iGroup], pic_size - 1, 1, iGroup);
            }
        } else if (current->slice_group_map_type == 3 ||
                   current->slice_group_map_type == 4 ||
                   current->slice_group_map_type == 5) {
            // Changing slice groups (box-out / raster / wipe).
            flag(slice_group_change_direction_flag);
            ue(slice_group_change_rate_minus1, 0, pic_size - 1);
        } else if (current->slice_group_map_type == 6) {
            // Explicit map: only the full-picture size is accepted here.
            ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1);

            allocate(current->slice_group_id,
                     current->pic_size_in_map_units_minus1 + 1);
            for (i = 0; i <= current->pic_size_in_map_units_minus1; i++)
                us(av_log2(2 * current->num_slice_groups_minus1 + 1),
                   slice_group_id[i], 0, current->num_slice_groups_minus1, 1, i);
        }
    }

    ue(num_ref_idx_l0_default_active_minus1, 0, 31);
    ue(num_ref_idx_l1_default_active_minus1, 0, 31);

    flag(weighted_pred_flag);
    u(2, weighted_bipred_idc, 0, 2);

    // QP lower bound shifts down with higher luma bit depth.
    se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25);
    se(pic_init_qs_minus26, -26, +25);
    se(chroma_qp_index_offset, -12, +12);

    flag(deblocking_filter_control_present_flag);
    flag(constrained_intra_pred_flag);
    flag(redundant_pic_cnt_present_flag);

    // Optional High-profile PPS extension; present only if more RBSP data
    // remains (the more_rbsp_data() template macro differs for read/write).
    if (more_rbsp_data(current->more_rbsp_data))
    {
        flag(transform_8x8_mode_flag);

        flag(pic_scaling_matrix_present_flag);
        if (current->pic_scaling_matrix_present_flag) {
            for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) *
                                 current->transform_8x8_mode_flag); i++) {
                flags(pic_scaling_list_present_flag[i], 1, i);
                if (current->pic_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }

        se(second_chroma_qp_index_offset, -12, +12);
    } else {
        infer(transform_8x8_mode_flag, 0);
        infer(pic_scaling_matrix_present_flag, 0);
        infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset);
    }

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a buffering-period SEI message (H.264 section D.1.1).
// Activates the referenced SPS (h264->active_sps) as a side effect.
static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H264RawSEIBufferingPeriod *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i, length;

    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    // Delay fields use bit lengths taken from the SPS HRD parameters.
    if (sps->vui.nal_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->nal.initial_cpb_removal_delay[i],
               1, MAX_UINT_BITS(length), 1, i);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->nal.initial_cpb_removal_delay_offset[i],
               0, MAX_UINT_BITS(length), 1, i);
        }
    }

    if (sps->vui.vcl_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay[i],
               1, MAX_UINT_BITS(length), 1, i);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay_offset[i],
               0, MAX_UINT_BITS(length), 1, i);
        }
    }

    return 0;
}
// Read/write one clock timestamp inside a pic_timing SEI (H.264 D.1.2).
// NOTE(review): h264->active_sps is dereferenced without a NULL check;
// the caller (sei_pic_timing) is expected to have established it.
static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSEIPicTimestamp *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    uint8_t time_offset_length;
    int err;

    u(2, ct_type, 0, 2);
    flag(nuit_field_based_flag);
    u(5, counting_type, 0, 6);
    flag(full_timestamp_flag);
    flag(discontinuity_flag);
    flag(cnt_dropped_flag);
    u(8, n_frames, 0, 255);

    if (current->full_timestamp_flag) {
        u(6, seconds_value, 0, 59);
        u(6, minutes_value, 0, 59);
        u(5, hours_value,   0, 23);
    } else {
        // Nested presence flags: each finer field implies the coarser ones.
        flag(seconds_flag);
        if (current->seconds_flag) {
            u(6, seconds_value, 0, 59);
            flag(minutes_flag);
            if (current->minutes_flag) {
                u(6, minutes_value, 0, 59);
                flag(hours_flag);
                if (current->hours_flag)
                    u(5, hours_value, 0, 23);
            }
        }
    }

    // time_offset width comes from the HRD parameters; 24 is the spec
    // default when neither HRD structure is present.
    sps = h264->active_sps;
    if (sps->vui.nal_hrd_parameters_present_flag)
        time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length;
    else if (sps->vui.vcl_hrd_parameters_present_flag)
        time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length;
    else
        time_offset_length = 24;

    if (time_offset_length > 0)
        u(time_offset_length, time_offset,
          0, MAX_UINT_BITS(time_offset_length));
    else
        infer(time_offset, 0);

    return 0;
}
// Read/write a pic_timing SEI message (H.264 section D.1.2).  Needs an
// active SPS for the HRD/pic_struct presence flags; falls back to the only
// stored SPS if none has been activated yet.
static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawSEIPicTiming *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err;

    sps = h264->active_sps;
    if (!sps) {
        // If there is exactly one possible SPS but it is not yet active
        // then just assume that it should be the active one.
        int i, k = -1;
        for (i = 0; i < H264_MAX_SPS_COUNT; i++) {
            if (h264->sps[i]) {
                if (k >= 0) {
                    // More than one candidate: give up.
                    k = -1;
                    break;
                }
                k = i;
            }
        }
        if (k >= 0)
            sps = h264->sps[k];
    }
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "No active SPS for pic_timing.\n");
        return AVERROR_INVALIDDATA;
    }

    if (sps->vui.nal_hrd_parameters_present_flag ||
        sps->vui.vcl_hrd_parameters_present_flag) {
        const H264RawHRD *hrd;

        if (sps->vui.nal_hrd_parameters_present_flag)
            hrd = &sps->vui.nal_hrd_parameters;
        else if (sps->vui.vcl_hrd_parameters_present_flag)
            hrd = &sps->vui.vcl_hrd_parameters;
        else {
            // Unreachable given the enclosing condition; kept defensively.
            av_log(ctx->log_ctx, AV_LOG_ERROR,
                   "No HRD parameters for pic_timing.\n");
            return AVERROR_INVALIDDATA;
        }

        u(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay,
          0, MAX_UINT_BITS(hrd->cpb_removal_delay_length_minus1 + 1));
        u(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay,
          0, MAX_UINT_BITS(hrd->dpb_output_delay_length_minus1 + 1));
    }

    if (sps->vui.pic_struct_present_flag) {
        // Number of clock timestamps for each pic_struct value (Table D-1).
        static const int num_clock_ts[9] = {
            1, 1, 1, 2, 2, 3, 3, 2, 3
        };
        int i;

        u(4, pic_struct, 0, 8);
        if (current->pic_struct > 8)
            return AVERROR_INVALIDDATA;

        for (i = 0; i < num_clock_ts[current->pic_struct]; i++) {
            flags(clock_timestamp_flag[i], 1, i);
            if (current->clock_timestamp_flag[i])
                CHECK(FUNC(sei_pic_timestamp)(ctx, rw, &current->timestamp[i]));
        }
    }

    return 0;
}
// Read/write an ITU-T T.35 registered user-data SEI (H.264 D.1.5).
// i counts the country-code bytes already consumed (1 or 2); the remaining
// payload bytes are the opaque data.  payload_size is read when parsing and
// written back when writing.
static int FUNC(sei_user_data_registered)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H264RawSEIUserDataRegistered *current,
                                          uint32_t *payload_size)
{
    int err, i, j;

    u(8, itu_t_t35_country_code, 0x00, 0xff);
    if (current->itu_t_t35_country_code != 0xff)
        i = 1;
    else {
        // 0xff escapes to an extension byte (two header bytes total).
        u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff);
        i = 2;
    }

#ifdef READ
    if (*payload_size < i) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data registered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - i;
#else
    *payload_size = i + current->data_length;
#endif

    allocate(current->data, current->data_length);
    for (j = 0; j < current->data_length; j++)
        xu(8, itu_t_t35_payload_byte[i], current->data[j], 0x00, 0xff, 1, i + j);

    return 0;
}
// Read/write an unregistered user-data SEI (H.264 D.1.6): a 16-byte UUID
// followed by opaque payload bytes.
static int FUNC(sei_user_data_unregistered)(CodedBitstreamContext *ctx, RWContext *rw,
                                            H264RawSEIUserDataUnregistered *current,
                                            uint32_t *payload_size)
{
    int err, i;

#ifdef READ
    // Payload must at least contain the UUID.
    if (*payload_size < 16) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data unregistered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - 16;
#else
    *payload_size = 16 + current->data_length;
#endif

    for (i = 0; i < 16; i++)
        us(8, uuid_iso_iec_11578[i], 0x00, 0xff, 1, i);

    allocate(current->data, current->data_length);

    for (i = 0; i < current->data_length; i++)
        xu(8, user_data_payload_byte[i], current->data[i], 0x00, 0xff, 1, i);

    return 0;
}
// Read/write a recovery-point SEI message (H.264 section D.1.8).
static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw,
                                    H264RawSEIRecoveryPoint *current)
{
    int err;

    ue(recovery_frame_cnt, 0, 65535);
    flag(exact_match_flag);
    flag(broken_link_flag);
    u(2, changing_slice_group_idc, 0, 2);

    return 0;
}
// Read/write a display-orientation SEI message (H.264 section D.1.27):
// flip flags and a rotation, unless the cancel flag revokes a previous one.
static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H264RawSEIDisplayOrientation *current)
{
    int err;

    flag(display_orientation_cancel_flag);
    if (!current->display_orientation_cancel_flag) {
        flag(hor_flip);
        flag(ver_flip);
        u(16, anticlockwise_rotation, 0, 65535);
        ue(display_orientation_repetition_period, 0, 16384);
        flag(display_orientation_extension_flag);
    }

    return 0;
}
// Read/write a single SEI payload body, dispatching on payload_type
// (H.264 section 7.3.2.3.1).  Unknown types are kept as raw bytes.  When
// reading, the bit count is checked against the declared payload_size;
// when writing, payload_size is recomputed from the bits produced.
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
                             H264RawSEIPayload *current)
{
    int err, i;
    int start_position, end_position;

#ifdef READ
    start_position = get_bits_count(rw);
#else
    start_position = put_bits_count(rw);
#endif

    switch (current->payload_type) {
    case H264_SEI_TYPE_BUFFERING_PERIOD:
        CHECK(FUNC(sei_buffering_period)
              (ctx, rw, &current->payload.buffering_period));
        break;
    case H264_SEI_TYPE_PIC_TIMING:
        CHECK(FUNC(sei_pic_timing)
              (ctx, rw, &current->payload.pic_timing));
        break;
    case H264_SEI_TYPE_FILLER_PAYLOAD:
        {
            // Filler is payload_size bytes of 0xff; nothing is stored.
            for (i = 0; i < current->payload_size; i++)
                fixed(8, ff_byte, 0xff);
        }
        break;
    case H264_SEI_TYPE_USER_DATA_REGISTERED:
        CHECK(FUNC(sei_user_data_registered)
              (ctx, rw, &current->payload.user_data_registered, &current->payload_size));
        break;
    case H264_SEI_TYPE_USER_DATA_UNREGISTERED:
        CHECK(FUNC(sei_user_data_unregistered)
              (ctx, rw, &current->payload.user_data_unregistered, &current->payload_size));
        break;
    case H264_SEI_TYPE_RECOVERY_POINT:
        CHECK(FUNC(sei_recovery_point)
              (ctx, rw, &current->payload.recovery_point));
        break;
    case H264_SEI_TYPE_DISPLAY_ORIENTATION:
        CHECK(FUNC(sei_display_orientation)
              (ctx, rw, &current->payload.display_orientation));
        break;
    default:
        {
            // Unrecognized payload type: preserve the raw bytes verbatim.
            allocate(current->payload.other.data, current->payload_size);
            for (i = 0; i < current->payload_size; i++)
                xu(8, payload_byte[i], current->payload.other.data[i], 0, 255, 1, i);
        }
    }

    // Payload bit-alignment trailer (one 1-bit, then 0-bits to the byte).
    if (byte_alignment(rw)) {
        fixed(1, bit_equal_to_one, 1);
        while (byte_alignment(rw))
            fixed(1, bit_equal_to_zero, 0);
    }

#ifdef READ
    end_position = get_bits_count(rw);
    if (end_position < start_position + 8 * current->payload_size) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Incorrect SEI payload length: "
               "header %"PRIu32" bits, actually %d bits.\n",
               8 * current->payload_size,
               end_position - start_position);
        return AVERROR_INVALIDDATA;
    }
#else
    end_position = put_bits_count(rw);
    current->payload_size = (end_position - start_position) / 8;
#endif

    return 0;
}
// Read/write a complete SEI NAL unit (H.264 section 7.3.2.3): a sequence
// of payloads, each prefixed with 0xff-escaped type and size bytes.
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSEI *current)
{
    int err, k;

    HEADER("Supplemental Enhancement Information");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SEI));

#ifdef READ
    for (k = 0; k < H264_MAX_SEI_PAYLOADS; k++) {
        uint32_t payload_type = 0;
        uint32_t payload_size = 0;
        uint32_t tmp;

        // Each 0xff byte adds 255 to the value; the first non-0xff byte
        // terminates the sequence (ff_byte escape coding).
        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_type += 255;
        }
        xu(8, last_payload_type_byte, tmp, 0, 254, 0);
        payload_type += tmp;

        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_size += 255;
        }
        xu(8, last_payload_size_byte, tmp, 0, 254, 0);
        payload_size += tmp;

        current->payload[k].payload_type = payload_type;
        current->payload[k].payload_size = payload_size;

        CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));

        if (!cbs_h2645_read_more_rbsp_data(rw))
            break;
    }
    if (k >= H264_MAX_SEI_PAYLOADS) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
               "SEI message: found %d.\n", k);
        return AVERROR_INVALIDDATA;
    }
    current->payload_count = k + 1;
#else
    for (k = 0; k < current->payload_count; k++) {
        PutBitContext start_state;
        uint32_t tmp;
        int need_size, i;

        // Somewhat clumsy: we write the payload twice when
        // we don't know the size in advance. This will mess
        // with trace output, but is otherwise harmless.
        start_state = *rw;
        need_size = !current->payload[k].payload_size;
        for (i = 0; i < 1 + need_size; i++) {
            // Rewind and rewrite: the first pass fills in payload_size.
            *rw = start_state;

            tmp = current->payload[k].payload_type;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_type_byte, tmp, 0, 254, 0);

            tmp = current->payload[k].payload_size;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_size_byte, tmp, 0, 254, 0);

            CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
        }
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write an access unit delimiter (H.264 section 7.3.2.4).
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawAUD *current)
{
    int err;

    HEADER("Access Unit Delimiter");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_AUD));

    u(3, primary_pic_type, 0, 7);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write ref_pic_list_modification() (H.264 section 7.3.3.1): optional
// reordering commands for reference list 0 (non-I/SI slices) and list 1
// (B slices).  Each list is a sequence of modification_of_pic_nums_idc
// operations terminated by idc == 3.
static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw,
                                           H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i, mopn;

    // slice_type % 5: 0=P, 1=B, 2=I, 3=SP, 4=SI; list 0 exists for all
    // but I and SI slices.
    if (current->slice_type % 5 != 2 &&
        current->slice_type % 5 != 4) {
        flag(ref_pic_list_modification_flag_l0);
        if (current->ref_pic_list_modification_flag_l0) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3, 0);

                mopn = current->rplm_l0[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    // Short-term reference: delta of pic nums, bounded by
                    // the frame-number range (doubled for field pictures).
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l0[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                        (1 << (sps->log2_max_frame_num_minus4 + 4)), 0);
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l0[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
            }
        }
    }

    // List 1 exists only for B slices.
    if (current->slice_type % 5 == 1) {
        flag(ref_pic_list_modification_flag_l1);
        if (current->ref_pic_list_modification_flag_l1) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3, 0);

                mopn = current->rplm_l1[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l1[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                        (1 << (sps->log2_max_frame_num_minus4 + 4)), 0);
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l1[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
            }
        }
    }

    return 0;
}
// Read/write pred_weight_table() (H.264 section 7.3.3.2): explicit weighted
// prediction weights/offsets for list 0 and, for B slices, list 1.  Chroma
// entries are present only when the stream actually has chroma planes
// coded together with luma.
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int chroma;
    int err, i, j;

    ue(luma_log2_weight_denom, 0, 7);

    // Chroma weights exist unless monochrome or separate colour planes.
    chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0;
    if (chroma)
        ue(chroma_log2_weight_denom, 0, 7);

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        flags(luma_weight_l0_flag[i], 1, i);
        if (current->luma_weight_l0_flag[i]) {
            ses(luma_weight_l0[i], -128, +127, 1, i);
            ses(luma_offset_l0[i], -128, +127, 1, i);
        }
        if (chroma) {
            flags(chroma_weight_l0_flag[i], 1, i);
            if (current->chroma_weight_l0_flag[i]) {
                // j indexes the two chroma components (Cb, Cr).
                for (j = 0; j < 2; j++) {
                    ses(chroma_weight_l0[i][j], -128, +127, 2, i, j);
                    ses(chroma_offset_l0[i][j], -128, +127, 2, i, j);
                }
            }
        }
    }

    // List 1 weights only for B slices.
    if (current->slice_type % 5 == 1) {
        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            flags(luma_weight_l1_flag[i], 1, i);
            if (current->luma_weight_l1_flag[i]) {
                ses(luma_weight_l1[i], -128, +127, 1, i);
                ses(luma_offset_l1[i], -128, +127, 1, i);
            }
            if (chroma) {
                flags(chroma_weight_l1_flag[i], 1, i);
                if (current->chroma_weight_l1_flag[i]) {
                    for (j = 0; j < 2; j++) {
                        ses(chroma_weight_l1[i][j], -128, +127, 2, i, j);
                        ses(chroma_offset_l1[i][j], -128, +127, 2, i, j);
                    }
                }
            }
        }
    }

    return 0;
}
// Read/write dec_ref_pic_marking() (H.264 section 7.3.3.3).  IDR pictures
// carry two simple flags; other pictures may carry a list of memory
// management control operations (MMCOs) terminated by operation 0.
static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H264RawSliceHeader *current, int idr_pic_flag)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i;
    uint32_t mmco;

    if (idr_pic_flag) {
        flag(no_output_of_prior_pics_flag);
        flag(long_term_reference_flag);
    } else {
        flag(adaptive_ref_pic_marking_mode_flag);
        if (current->adaptive_ref_pic_marking_mode_flag) {
            for (i = 0; i < H264_MAX_MMCO_COUNT; i++) {
                xue(memory_management_control_operation,
                    current->mmco[i].memory_management_control_operation,
                    0, 6, 0);

                mmco = current->mmco[i].memory_management_control_operation;
                if (mmco == 0)
                    break;

                // Each operation carries its own argument (Table 7-9):
                // 1/3 take a pic-num difference, 2 a long-term pic num,
                // 3/6 a long-term frame index, 4 the max index plus one.
                if (mmco == 1 || mmco == 3)
                    xue(difference_of_pic_nums_minus1,
                        current->mmco[i].difference_of_pic_nums_minus1,
                        0, INT32_MAX, 0);
                if (mmco == 2)
                    xue(long_term_pic_num,
                        current->mmco[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1, 0);
                if (mmco == 3 || mmco == 6)
                    xue(long_term_frame_idx,
                        current->mmco[i].long_term_frame_idx,
                        0, sps->max_num_ref_frames - 1, 0);
                if (mmco == 4)
                    xue(max_long_term_frame_idx_plus1,
                        current->mmco[i].max_long_term_frame_idx_plus1,
                        0, sps->max_num_ref_frames, 0);
            }
            // Loop exhausted without the terminating mmco == 0.
            if (i == H264_MAX_MMCO_COUNT) {
                av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many "
                       "memory management control operations.\n");
                return AVERROR_INVALIDDATA;
            }
        }
    }

    return 0;
}
/*
 * slice_header() syntax.
 *
 * Parses/writes a slice header for slice, IDR-slice and auxiliary-slice
 * NAL units.  As a side effect it activates the referenced PPS and its SPS
 * in the codec context (h264->active_pps / h264->active_sps), which the
 * sub-parsers (pred_weight_table, dec_ref_pic_marking, ...) rely on.
 * MVC / 3D-AVC slice extensions (NAL types 20/21) are not supported.
 */
static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    const H264RawPPS *pps;
    int err;
    int idr_pic_flag;
    int slice_type_i, slice_type_p, slice_type_b;
    int slice_type_si, slice_type_sp;

    HEADER("Slice Header");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SLICE |
                                1 << H264_NAL_IDR_SLICE |
                                1 << H264_NAL_AUXILIARY_SLICE));

    if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) {
        // An auxiliary slice inherits properties (e.g. IDR-ness) from the
        // primary slice of the same access unit, so one must have been
        // seen already.
        if (!h264->last_slice_nal_unit_type) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice "
                   "is not decodable without the main picture "
                   "in the same access unit.\n");
            return AVERROR_INVALIDDATA;
        }
    } else {
        h264->last_slice_nal_unit_type =
            current->nal_unit_header.nal_unit_type;
    }
    idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE;

    ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1);
    ue(slice_type, 0, 9);

    // slice_type values 5..9 mean the same as 0..4 (all slices in the
    // picture have this type), hence the modulo-5 classification.
    slice_type_i  = current->slice_type % 5 == 2;
    slice_type_p  = current->slice_type % 5 == 0;
    slice_type_b  = current->slice_type % 5 == 1;
    slice_type_si = current->slice_type % 5 == 4;
    slice_type_sp = current->slice_type % 5 == 3;

    // IDR pictures may only contain I or SI slices.
    if (idr_pic_flag && !(slice_type_i || slice_type_si)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d "
               "for IDR picture.\n", current->slice_type);
        return AVERROR_INVALIDDATA;
    }

    ue(pic_parameter_set_id, 0, 255);

    // Activate the referenced PPS, then the SPS it references in turn.
    pps = h264->pps[current->pic_parameter_set_id];
    if (!pps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
               current->pic_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_pps = pps;

    sps = h264->sps[pps->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               pps->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    if (sps->separate_colour_plane_flag)
        u(2, colour_plane_id, 0, 2);

    // frame_num uses a bit width defined by the SPS.
    u(sps->log2_max_frame_num_minus4 + 4, frame_num,
      0, MAX_UINT_BITS(sps->log2_max_frame_num_minus4 + 4));

    if (!sps->frame_mbs_only_flag) {
        flag(field_pic_flag);
        if (current->field_pic_flag)
            flag(bottom_field_flag);
        else
            infer(bottom_field_flag, 0);
    } else {
        // Progressive-only stream: both field flags are implied zero.
        infer(field_pic_flag, 0);
        infer(bottom_field_flag, 0);
    }

    if (idr_pic_flag)
        ue(idr_pic_id, 0, 65535);

    // Picture order count syntax depends on the SPS POC type.
    if (sps->pic_order_cnt_type == 0) {
        u(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb,
          0, MAX_UINT_BITS(sps->log2_max_pic_order_cnt_lsb_minus4 + 4));
        if (pps->bottom_field_pic_order_in_frame_present_flag &&
            !current->field_pic_flag)
            se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX);

    } else if (sps->pic_order_cnt_type == 1) {
        if (!sps->delta_pic_order_always_zero_flag) {
            se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX);
            if (pps->bottom_field_pic_order_in_frame_present_flag &&
                !current->field_pic_flag)
                se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX);
            else
                infer(delta_pic_order_cnt[1], 0);
        } else {
            infer(delta_pic_order_cnt[0], 0);
            infer(delta_pic_order_cnt[1], 0);
        }
    }

    if (pps->redundant_pic_cnt_present_flag)
        ue(redundant_pic_cnt, 0, 127);

    if (slice_type_b)
        flag(direct_spatial_mv_pred_flag);

    // Active reference counts: either overridden here or inherited from
    // the PPS defaults.
    if (slice_type_p || slice_type_sp || slice_type_b) {
        flag(num_ref_idx_active_override_flag);
        if (current->num_ref_idx_active_override_flag) {
            ue(num_ref_idx_l0_active_minus1, 0, 31);
            if (slice_type_b)
                ue(num_ref_idx_l1_active_minus1, 0, 31);
        } else {
            infer(num_ref_idx_l0_active_minus1,
                  pps->num_ref_idx_l0_default_active_minus1);
            infer(num_ref_idx_l1_active_minus1,
                  pps->num_ref_idx_l1_default_active_minus1);
        }
    }

    // NAL types 20/21 would require ref_pic_list_mvc_modification(),
    // which is not implemented.
    if (current->nal_unit_header.nal_unit_type == 20 ||
        current->nal_unit_header.nal_unit_type == 21) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n");
        return AVERROR_PATCHWELCOME;
    } else {
        CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current));
    }

    if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) ||
        (pps->weighted_bipred_idc == 1 && slice_type_b)) {
        CHECK(FUNC(pred_weight_table)(ctx, rw, current));
    }

    // Reference picture marking is only present for reference slices
    // (nal_ref_idc != 0).
    if (current->nal_unit_header.nal_ref_idc != 0) {
        CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag));
    }

    if (pps->entropy_coding_mode_flag &&
        !slice_type_i && !slice_type_si) {
        ue(cabac_init_idc, 0, 2);
    }

    // QP delta range widens with higher luma bit depth.
    se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8,
                       + 51 + 6 * sps->bit_depth_luma_minus8);

    if (slice_type_sp || slice_type_si) {
        if (slice_type_sp)
            flag(sp_for_switch_flag);
        se(slice_qs_delta, -51, +51);
    }

    if (pps->deblocking_filter_control_present_flag) {
        ue(disable_deblocking_filter_idc, 0, 2);
        if (current->disable_deblocking_filter_idc != 1) {
            se(slice_alpha_c0_offset_div2, -6, +6);
            se(slice_beta_offset_div2,     -6, +6);
        } else {
            infer(slice_alpha_c0_offset_div2, 0);
            infer(slice_beta_offset_div2,     0);
        }
    } else {
        infer(disable_deblocking_filter_idc, 0);
        infer(slice_alpha_c0_offset_div2,    0);
        infer(slice_beta_offset_div2,        0);
    }

    // slice_group_change_cycle is present only for changing slice group
    // map types (3..5); its bit width depends on picture size and the
    // change rate from the PPS.
    if (pps->num_slice_groups_minus1 > 0 &&
        pps->slice_group_map_type >= 3 &&
        pps->slice_group_map_type <= 5) {
        unsigned int pic_size, max, bits;

        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);
        max = (pic_size + pps->slice_group_change_rate_minus1) /
              (pps->slice_group_change_rate_minus1 + 1);
        bits = av_log2(2 * max - 1);

        u(bits, slice_group_change_cycle, 0, max);
    }

    // CABAC slice data must start byte-aligned; pad with one-bits.
    if (pps->entropy_coding_mode_flag) {
        while (byte_alignment(rw))
            fixed(1, cabac_alignment_one_bit, 1);
    }

    return 0;
}
/*
 * filler_data_rbsp() syntax.
 *
 * When reading, consumes 0xff bytes while counting them into
 * current->filler_size; when writing, emits exactly filler_size 0xff
 * bytes.  The two directions are selected at template-expansion time via
 * the READ define.  Ends with the usual RBSP trailing bits.
 */
static int FUNC(filler)(CodedBitstreamContext *ctx, RWContext *rw,
                        H264RawFiller *current)
{
    int err;

    HEADER("Filler Data");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_FILLER_DATA));

#ifdef READ
    // Peek ahead so the loop stops at the rbsp_trailing_bits, which do
    // not form a full 0xff byte.
    while (show_bits(rw, 8) == 0xff) {
        fixed(8, ff_byte, 0xff);
        ++current->filler_size;
    }
#else
    {
        uint32_t i;
        for (i = 0; i < current->filler_size; i++)
            fixed(8, ff_byte, 0xff);
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}