You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1247 lines
41KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
// Read/write rbsp_trailing_bits(): a single stop bit (1) followed by
// zero bits up to the next byte boundary (H.264 section 7.3.2.11).
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;
    av_unused int one = 1, zero = 0;
    // rbsp_stop_one_bit: must be 1.
    xu(1, rbsp_stop_one_bit, one, 1, 1);
    // rbsp_alignment_zero_bit: pad with zeros until byte-aligned.
    while (byte_alignment(rw) != 0)
        xu(1, rbsp_alignment_zero_bit, zero, 0, 0);
    return 0;
}
// Read/write the NAL unit header (H.264 section 7.3.1) and validate the
// unit type against valid_type_mask (a bitmask of permitted nal_unit_type
// values).  Extension headers (SVC/MVC/3D-AVC, types 14/20/21) are
// detected but not supported: they always return AVERROR_PATCHWELCOME.
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H264RawNALUnitHeader *current,
                                 uint32_t valid_type_mask)
{
    int err;
    u(1, forbidden_zero_bit, 0, 0);
    u(2, nal_ref_idc, 0, 3);
    u(5, nal_unit_type, 0, 31);
    // Note: relies on '<<' binding tighter than '&'.
    if (!(1 << current->nal_unit_type & valid_type_mask)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n",
               current->nal_unit_type);
        return AVERROR_INVALIDDATA;
    }
    if (current->nal_unit_type == 14 ||
        current->nal_unit_type == 20 ||
        current->nal_unit_type == 21) {
        // Type 21 (3D-AVC slice extension) carries avc_3d_extension_flag;
        // types 14/20 carry svc_extension_flag.
        if (current->nal_unit_type != 21)
            flag(svc_extension_flag);
        else
            flag(avc_3d_extension_flag);
        if (current->svc_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else if (current->avc_3d_extension_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        } else {
            // Neither flag set: MVC (nal_unit_header_mvc_extension).
            av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n");
            return AVERROR_PATCHWELCOME;
        }
    }
    return 0;
}
// Read/write one scaling list of delta_scale values (H.264 section 7.3.2.1.1.1).
// Only the deltas are stored in H264RawScalingList; the derived scale is
// tracked here just to know when the list terminates early (scale == 0
// means "use default for remaining entries" per the spec derivation).
static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawScalingList *current,
                              int size_of_scaling_list)
{
    int err, i, scale;
    scale = 8;
    for (i = 0; i < size_of_scaling_list; i++) {
        xse(delta_scale, current->delta_scale[i], -128, +127);
        // Wrap into [0, 255]; +256 keeps the operand of % non-negative.
        scale = (scale + current->delta_scale[i] + 256) % 256;
        if (scale == 0)
            break;
    }
    return 0;
}
// Read/write hrd_parameters() (H.264 Annex E.1.2): per-CPB bitrate/size
// values plus the lengths of the various HRD timing fields used later by
// buffering-period and pic-timing SEI messages.
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawHRD *current)
{
    int err, i;
    ue(cpb_cnt_minus1, 0, 31);
    u(4, bit_rate_scale, 0, 15);
    u(4, cpb_size_scale, 0, 15);
    for (i = 0; i <= current->cpb_cnt_minus1; i++) {
        ue(bit_rate_value_minus1[i], 0, UINT32_MAX - 1);
        ue(cpb_size_value_minus1[i], 0, UINT32_MAX - 1);
        flag(cbr_flag[i]);
    }
    // Field lengths (minus one) consumed by SEI parsing elsewhere in this file.
    u(5, initial_cpb_removal_delay_length_minus1, 0, 31);
    u(5, cpb_removal_delay_length_minus1, 0, 31);
    u(5, dpb_output_delay_length_minus1, 0, 31);
    u(5, time_offset_length, 0, 31);
    return 0;
}
  92. static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
  93. H264RawVUI *current, H264RawSPS *sps)
  94. {
  95. int err;
  96. flag(aspect_ratio_info_present_flag);
  97. if (current->aspect_ratio_info_present_flag) {
  98. u(8, aspect_ratio_idc, 0, 255);
  99. if (current->aspect_ratio_idc == 255) {
  100. u(16, sar_width, 0, 65535);
  101. u(16, sar_height, 0, 65535);
  102. }
  103. } else {
  104. infer(aspect_ratio_idc, 0);
  105. }
  106. flag(overscan_info_present_flag);
  107. if (current->overscan_info_present_flag)
  108. flag(overscan_appropriate_flag);
  109. flag(video_signal_type_present_flag);
  110. if (current->video_signal_type_present_flag) {
  111. u(3, video_format, 0, 7);
  112. flag(video_full_range_flag);
  113. flag(colour_description_present_flag);
  114. if (current->colour_description_present_flag) {
  115. u(8, colour_primaries, 0, 255);
  116. u(8, transfer_characteristics, 0, 255);
  117. u(8, matrix_coefficients, 0, 255);
  118. }
  119. } else {
  120. infer(video_format, 5);
  121. infer(video_full_range_flag, 0);
  122. infer(colour_primaries, 2);
  123. infer(transfer_characteristics, 2);
  124. infer(matrix_coefficients, 2);
  125. }
  126. flag(chroma_loc_info_present_flag);
  127. if (current->chroma_loc_info_present_flag) {
  128. ue(chroma_sample_loc_type_top_field, 0, 5);
  129. ue(chroma_sample_loc_type_bottom_field, 0, 5);
  130. } else {
  131. infer(chroma_sample_loc_type_top_field, 0);
  132. infer(chroma_sample_loc_type_bottom_field, 0);
  133. }
  134. flag(timing_info_present_flag);
  135. if (current->timing_info_present_flag) {
  136. u(32, num_units_in_tick, 1, UINT32_MAX);
  137. u(32, time_scale, 1, UINT32_MAX);
  138. flag(fixed_frame_rate_flag);
  139. } else {
  140. infer(fixed_frame_rate_flag, 0);
  141. }
  142. flag(nal_hrd_parameters_present_flag);
  143. if (current->nal_hrd_parameters_present_flag)
  144. CHECK(FUNC(hrd_parameters)(ctx, rw, &current->nal_hrd_parameters));
  145. flag(vcl_hrd_parameters_present_flag);
  146. if (current->vcl_hrd_parameters_present_flag)
  147. CHECK(FUNC(hrd_parameters)(ctx, rw, &current->vcl_hrd_parameters));
  148. if (current->nal_hrd_parameters_present_flag ||
  149. current->vcl_hrd_parameters_present_flag)
  150. flag(low_delay_hrd_flag);
  151. else
  152. infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag);
  153. flag(pic_struct_present_flag);
  154. flag(bitstream_restriction_flag);
  155. if (current->bitstream_restriction_flag) {
  156. flag(motion_vectors_over_pic_boundaries_flag);
  157. ue(max_bytes_per_pic_denom, 0, 16);
  158. ue(max_bits_per_mb_denom, 0, 16);
  159. ue(log2_max_mv_length_horizontal, 0, 16);
  160. ue(log2_max_mv_length_vertical, 0, 16);
  161. ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES);
  162. ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES);
  163. } else {
  164. infer(motion_vectors_over_pic_boundaries_flag, 1);
  165. infer(max_bytes_per_pic_denom, 2);
  166. infer(max_bits_per_mb_denom, 1);
  167. infer(log2_max_mv_length_horizontal, 16);
  168. infer(log2_max_mv_length_vertical, 16);
  169. if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
  170. sps->profile_idc == 110 || sps->profile_idc == 110 ||
  171. sps->profile_idc == 122 || sps->profile_idc == 244) &&
  172. sps->constraint_set3_flag) {
  173. infer(max_num_reorder_frames, 0);
  174. infer(max_dec_frame_buffering, 0);
  175. } else {
  176. infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
  177. infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
  178. }
  179. }
  180. return 0;
  181. }
// Read/write a sequence parameter set (H.264 section 7.3.2.1.1), including
// chroma/bit-depth fields for the extended profiles, scaling matrices,
// POC configuration, frame dimensions/cropping and optional VUI.
static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSPS *current)
{
    int err, i;

    HEADER("Sequence Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS));

    u(8, profile_idc, 0, 255);
    flag(constraint_set0_flag);
    flag(constraint_set1_flag);
    flag(constraint_set2_flag);
    flag(constraint_set3_flag);
    flag(constraint_set4_flag);
    flag(constraint_set5_flag);
    u(2, reserved_zero_2bits, 0, 0);
    u(8, level_idc, 0, 255);
    ue(seq_parameter_set_id, 0, 31);

    // Profiles that carry the chroma-format / bit-depth / scaling-matrix
    // block (High family and extensions).
    if (current->profile_idc == 100 || current->profile_idc == 110 ||
        current->profile_idc == 122 || current->profile_idc == 244 ||
        current->profile_idc ==  44 || current->profile_idc ==  83 ||
        current->profile_idc ==  86 || current->profile_idc == 118 ||
        current->profile_idc == 128 || current->profile_idc == 138) {
        ue(chroma_format_idc, 0, 3);
        // separate_colour_plane_flag only exists for 4:4:4.
        if (current->chroma_format_idc == 3)
            flag(separate_colour_plane_flag);
        else
            infer(separate_colour_plane_flag, 0);
        ue(bit_depth_luma_minus8,   0, 6);
        ue(bit_depth_chroma_minus8, 0, 6);
        flag(qpprime_y_zero_transform_bypass_flag);
        flag(seq_scaling_matrix_present_flag);
        if (current->seq_scaling_matrix_present_flag) {
            // 8 lists (6x 4x4 + 2x 8x8), or 12 when 4:4:4 (6x 8x8).
            for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) {
                flag(seq_scaling_list_present_flag[i]);
                if (current->seq_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }
    } else {
        // Other profiles: 4:2:0, 8-bit; profile 183 (CAVLC 4:4:4 intra
        // constrained monochrome) implies chroma_format_idc 0.
        infer(chroma_format_idc, current->profile_idc == 183 ? 0 : 1);
        infer(separate_colour_plane_flag, 0);
        infer(bit_depth_luma_minus8,   0);
        infer(bit_depth_chroma_minus8, 0);
    }

    ue(log2_max_frame_num_minus4, 0, 12);
    ue(pic_order_cnt_type, 0, 2);
    if (current->pic_order_cnt_type == 0) {
        ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
    } else if (current->pic_order_cnt_type == 1) {
        flag(delta_pic_order_always_zero_flag);
        // INT32_MIN + 1 lower bound: the se() range must stay negatable.
        se(offset_for_non_ref_pic,         INT32_MIN + 1, INT32_MAX);
        se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX);
        ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255);
        for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++)
            se(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX);
    }

    ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES);
    flag(gaps_in_frame_num_allowed_flag);
    ue(pic_width_in_mbs_minus1,        0, H264_MAX_MB_WIDTH);
    ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT);
    flag(frame_mbs_only_flag);
    if (!current->frame_mbs_only_flag)
        flag(mb_adaptive_frame_field_flag);
    flag(direct_8x8_inference_flag);

    flag(frame_cropping_flag);
    if (current->frame_cropping_flag) {
        ue(frame_crop_left_offset,   0, H264_MAX_WIDTH);
        ue(frame_crop_right_offset,  0, H264_MAX_WIDTH);
        ue(frame_crop_top_offset,    0, H264_MAX_HEIGHT);
        ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT);
    }

    flag(vui_parameters_present_flag);
    if (current->vui_parameters_present_flag)
        CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a sequence parameter set extension (H.264 section 7.3.2.1.2):
// auxiliary (alpha) plane format information.
static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                               H264RawSPSExtension *current)
{
    int err;

    HEADER("Sequence Parameter Set Extension");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SPS_EXT));

    ue(seq_parameter_set_id, 0, 31);
    ue(aux_format_idc, 0, 3);
    if (current->aux_format_idc != 0) {
        int bits;
        ue(bit_depth_aux_minus8, 0, 4);
        flag(alpha_incr_flag);
        // Alpha values are coded with bit_depth_aux_minus8 + 9 bits.
        bits = current->bit_depth_aux_minus8 + 9;
        u(bits, alpha_opaque_value,      0, (1 << bits) - 1);
        u(bits, alpha_transparent_value, 0, (1 << bits) - 1);
    }
    flag(additional_extension_flag);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a picture parameter set (H.264 section 7.3.2.2).  Requires
// the referenced SPS to already be stored in h264->sps[] (needed for the
// picture size used in slice-group maps and for bit-depth-dependent QP
// bounds).
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawPPS *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i;

    HEADER("Picture Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_PPS));

    ue(pic_parameter_set_id, 0, 255);
    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }

    flag(entropy_coding_mode_flag);
    flag(bottom_field_pic_order_in_frame_present_flag);

    ue(num_slice_groups_minus1, 0, 7);
    if (current->num_slice_groups_minus1 > 0) {
        unsigned int pic_size;
        int iGroup;

        // Picture size in map units, bounds for slice-group map fields.
        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);

        ue(slice_group_map_type, 0, 6);

        if (current->slice_group_map_type == 0) {
            // Interleaved slice groups: one run length per group.
            for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++)
                ue(run_length_minus1[iGroup], 0, pic_size - 1);
        } else if (current->slice_group_map_type == 2) {
            // Foreground + leftover: rectangles for all but the last group.
            for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) {
                ue(top_left[iGroup],     0, pic_size - 1);
                ue(bottom_right[iGroup], current->top_left[iGroup], pic_size - 1);
            }
        } else if (current->slice_group_map_type == 3 ||
                   current->slice_group_map_type == 4 ||
                   current->slice_group_map_type == 5) {
            // Changing (box-out / raster / wipe) slice groups.
            flag(slice_group_change_direction_flag);
            ue(slice_group_change_rate_minus1, 0, pic_size - 1);
        } else if (current->slice_group_map_type == 6) {
            // Explicit map: one slice_group_id per map unit, coded with
            // ceil(log2(num_slice_groups)) bits.
            ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1);
            for (i = 0; i <= current->pic_size_in_map_units_minus1; i++)
                u(av_log2(2 * current->num_slice_groups_minus1 + 1),
                  slice_group_id[i], 0, current->num_slice_groups_minus1);
        }
    }

    ue(num_ref_idx_l0_default_active_minus1, 0, 31);
    ue(num_ref_idx_l1_default_active_minus1, 0, 31);
    flag(weighted_pred_flag);
    u(2, weighted_bipred_idc, 0, 2);
    // QP range widens with luma bit depth (QpBdOffsetY = 6 * bit_depth_luma_minus8).
    se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25);
    se(pic_init_qs_minus26, -26, +25);
    se(chroma_qp_index_offset, -12, +12);
    flag(deblocking_filter_control_present_flag);
    flag(constrained_intra_pred_flag);
    flag(redundant_pic_cnt_present_flag);

    // Optional trailing fields, present only if more RBSP data remains
    // (reader probes the bitstream; writer uses current->more_rbsp_data).
    if (more_rbsp_data(current->more_rbsp_data))
    {
        flag(transform_8x8_mode_flag);
        flag(pic_scaling_matrix_present_flag);
        if (current->pic_scaling_matrix_present_flag) {
            for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) *
                                 current->transform_8x8_mode_flag); i++) {
                flag(pic_scaling_list_present_flag[i]);
                if (current->pic_scaling_list_present_flag[i]) {
                    if (i < 6)
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_4x4[i],
                                                 16));
                    else
                        CHECK(FUNC(scaling_list)(ctx, rw,
                                                 &current->scaling_list_8x8[i - 6],
                                                 64));
                }
            }
        }
        se(second_chroma_qp_index_offset, -12, +12);
    } else {
        infer(transform_8x8_mode_flag, 0);
        infer(pic_scaling_matrix_present_flag, 0);
        infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset);
    }

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write a buffering-period SEI payload (H.264 section D.1.2).  Also
// sets h264->active_sps, since a buffering period activates its SPS.
// The CPB removal delay field widths come from the HRD parameters in the
// referenced SPS.
static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H264RawSEIBufferingPeriod *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err, i, length;

    ue(seq_parameter_set_id, 0, 31);

    sps = h264->sps[current->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    if (sps->vui.nal_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            // Cast forces a 64-bit shift so length == 32 does not overflow.
            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->nal.initial_cpb_removal_delay[i],
               0, (1 << (uint64_t)length) - 1);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->nal.initial_cpb_removal_delay_offset[i],
               0, (1 << (uint64_t)length) - 1);
        }
    }

    if (sps->vui.vcl_hrd_parameters_present_flag) {
        for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) {
            length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;

            xu(length, initial_cpb_removal_delay[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay[i],
               0, (1 << (uint64_t)length) - 1);
            xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
               current->vcl.initial_cpb_removal_delay_offset[i],
               0, (1 << (uint64_t)length) - 1);
        }
    }

    return 0;
}
// Read/write one clock_timestamp() structure inside a pic_timing SEI
// (H.264 section D.1.3).  The width of the trailing time_offset field
// comes from the active SPS's HRD parameters (default 24 when no HRD).
// NOTE(review): h264->active_sps is dereferenced without a NULL check;
// the caller (sei_pic_timing) appears responsible for ensuring it is set
// — confirm before calling from elsewhere.
static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSEIPicTimestamp *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    uint8_t time_offset_length;
    int err;

    u(2, ct_type, 0, 2);
    flag(nuit_field_based_flag);
    u(5, counting_type, 0, 6);
    flag(full_timestamp_flag);
    flag(discontinuity_flag);
    flag(cnt_dropped_flag);
    u(8, n_frames, 0, 255);
    if (current->full_timestamp_flag) {
        // All of seconds/minutes/hours are present.
        u(6, seconds_value, 0, 59);
        u(6, minutes_value, 0, 59);
        u(5, hours_value,   0, 23);
    } else {
        // Nested optional fields: each unit is only present if the
        // preceding one was.
        flag(seconds_flag);
        if (current->seconds_flag) {
            u(6, seconds_value, 0, 59);
            flag(minutes_flag);
            if (current->minutes_flag) {
                u(6, minutes_value, 0, 59);
                flag(hours_flag);
                if (current->hours_flag)
                    u(5, hours_value, 0, 23);
            }
        }
    }

    sps = h264->active_sps;
    if (sps->vui.nal_hrd_parameters_present_flag)
        time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length;
    else if (sps->vui.vcl_hrd_parameters_present_flag)
        time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length;
    else
        time_offset_length = 24;

    if (time_offset_length > 0)
        u(time_offset_length, time_offset,
          0, (1 << (uint64_t)time_offset_length) - 1);
    else
        infer(time_offset, 0);

    return 0;
}
// Read/write a pic_timing SEI payload (H.264 section D.1.3).  Needs an
// active SPS for the HRD field widths and pic_struct presence; if none
// has been activated yet but exactly one SPS exists, that one is assumed.
static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw,
                                H264RawSEIPicTiming *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    int err;

    sps = h264->active_sps;
    if (!sps) {
        // If there is exactly one possible SPS but it is not yet active
        // then just assume that it should be the active one.
        int i, k = -1;
        for (i = 0; i < H264_MAX_SPS_COUNT; i++) {
            if (h264->sps[i]) {
                if (k >= 0) {
                    // More than one candidate: give up.
                    k = -1;
                    break;
                }
                k = i;
            }
        }
        if (k >= 0)
            sps = h264->sps[k];
    }
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "No active SPS for pic_timing.\n");
        return AVERROR_INVALIDDATA;
    }

    if (sps->vui.nal_hrd_parameters_present_flag ||
        sps->vui.vcl_hrd_parameters_present_flag) {
        const H264RawHRD *hrd;

        if (sps->vui.nal_hrd_parameters_present_flag)
            hrd = &sps->vui.nal_hrd_parameters;
        else if (sps->vui.vcl_hrd_parameters_present_flag)
            hrd = &sps->vui.vcl_hrd_parameters;
        else {
            // Unreachable given the enclosing condition; kept defensively.
            av_log(ctx->log_ctx, AV_LOG_ERROR,
                   "No HRD parameters for pic_timing.\n");
            return AVERROR_INVALIDDATA;
        }

        // NOTE(review): the upper bounds use (1 << len_minus1) + 1 rather
        // than the full (1 << (len_minus1 + 1)) - 1 range of the coded
        // field — verify against the spec/upstream if tightening.
        u(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay,
          0, (1 << (uint64_t)hrd->cpb_removal_delay_length_minus1) + 1);
        u(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay,
          0, (1 << (uint64_t)hrd->dpb_output_delay_length_minus1) + 1);
    }

    if (sps->vui.pic_struct_present_flag) {
        // NumClockTS for pic_struct values 0..8 (Table D-1).
        static const int num_clock_ts[9] = {
            1, 1, 1, 2, 2, 3, 3, 2, 3
        };
        int i;

        u(4, pic_struct, 0, 8);
        if (current->pic_struct > 8)
            return AVERROR_INVALIDDATA;

        for (i = 0; i < num_clock_ts[current->pic_struct]; i++) {
            flag(clock_timestamp_flag[i]);
            if (current->clock_timestamp_flag[i])
                CHECK(FUNC(sei_pic_timestamp)(ctx, rw, &current->timestamp[i]));
        }
    }

    return 0;
}
// Read/write an ITU-T T.35 registered user-data SEI payload (D.1.5).
// When reading, the data length is derived from the payload size minus
// the country-code header bytes; when writing, the payload size is
// recomputed from the stored data length.
static int FUNC(sei_user_data_registered)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H264RawSEIUserDataRegistered *current,
                                          uint32_t *payload_size)
{
    int err, i, j;

    u(8, itu_t_t35_country_code, 0x00, 0xff);
    // Country code 0xff means an extension byte follows; i counts the
    // header bytes consumed.
    if (current->itu_t_t35_country_code != 0xff)
        i = 1;
    else {
        u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff);
        i = 2;
    }

#ifdef READ
    if (*payload_size < i) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data registered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - i;
#else
    *payload_size = i + current->data_length;
#endif

    allocate(current->data, current->data_length);
    for (j = 0; j < current->data_length; j++)
        xu(8, itu_t_t35_payload_byte, current->data[j], 0x00, 0xff);

    return 0;
}
// Read/write an unregistered user-data SEI payload (D.1.6): a 16-byte
// UUID followed by opaque data whose length is payload_size - 16.
static int FUNC(sei_user_data_unregistered)(CodedBitstreamContext *ctx, RWContext *rw,
                                            H264RawSEIUserDataUnregistered *current,
                                            uint32_t *payload_size)
{
    int err, i;

#ifdef READ
    if (*payload_size < 16) {
        av_log(ctx->log_ctx, AV_LOG_ERROR,
               "Invalid SEI user data unregistered payload.\n");
        return AVERROR_INVALIDDATA;
    }
    current->data_length = *payload_size - 16;
#else
    *payload_size = 16 + current->data_length;
#endif

    for (i = 0; i < 16; i++) {
        xu(8, uuid_iso_iec_11578,
           current->uuid_iso_iec_11578[i], 0x00, 0xff);
    }

    allocate(current->data, current->data_length);

    for (i = 0; i < current->data_length; i++)
        xu(8, user_data_payload_byte, current->data[i], 0x00, 0xff);

    return 0;
}
// Read/write a recovery-point SEI payload (H.264 section D.1.8).
static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw,
                                    H264RawSEIRecoveryPoint *current)
{
    int err;

    ue(recovery_frame_cnt, 0, 65535);
    flag(exact_match_flag);
    flag(broken_link_flag);
    u(2, changing_slice_group_idc, 0, 2);

    return 0;
}
// Read/write a display-orientation SEI payload (H.264 section D.1.27):
// flip/rotation information, present only while not cancelled.
static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H264RawSEIDisplayOrientation *current)
{
    int err;

    flag(display_orientation_cancel_flag);
    if (!current->display_orientation_cancel_flag) {
        flag(hor_flip);
        flag(ver_flip);
        u(16, anticlockwise_rotation, 0, 65535);
        ue(display_orientation_repetition_period, 0, 16384);
        flag(display_orientation_extension_flag);
    }

    return 0;
}
// Read/write a single sei_payload() (H.264 section 7.3.2.3.1): dispatch
// on payload_type to the specific payload parser, keep unknown payloads
// as raw bytes, then byte-align.  When reading, the recorded bit
// positions verify the payload did not overrun its declared size; when
// writing, payload_size is recomputed from the bits actually emitted.
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
                             H264RawSEIPayload *current)
{
    int err, i;
    int start_position, end_position;

#ifdef READ
    start_position = get_bits_count(rw);
#else
    start_position = put_bits_count(rw);
#endif

    switch (current->payload_type) {
    case H264_SEI_TYPE_BUFFERING_PERIOD:
        CHECK(FUNC(sei_buffering_period)
              (ctx, rw, &current->payload.buffering_period));
        break;
    case H264_SEI_TYPE_PIC_TIMING:
        CHECK(FUNC(sei_pic_timing)
              (ctx, rw, &current->payload.pic_timing));
        break;
    case H264_SEI_TYPE_FILLER_PAYLOAD:
        {
            // Filler is payload_size bytes of 0xff; nothing is stored.
            av_unused int ff_byte = 0xff;
            for (i = 0; i < current->payload_size; i++)
                xu(8, ff_byte, ff_byte, 0xff, 0xff);
        }
        break;
    case H264_SEI_TYPE_USER_DATA_REGISTERED:
        CHECK(FUNC(sei_user_data_registered)
              (ctx, rw, &current->payload.user_data_registered, &current->payload_size));
        break;
    case H264_SEI_TYPE_USER_DATA_UNREGISTERED:
        CHECK(FUNC(sei_user_data_unregistered)
              (ctx, rw, &current->payload.user_data_unregistered, &current->payload_size));
        break;
    case H264_SEI_TYPE_RECOVERY_POINT:
        CHECK(FUNC(sei_recovery_point)
              (ctx, rw, &current->payload.recovery_point));
        break;
    case H264_SEI_TYPE_DISPLAY_ORIENTATION:
        CHECK(FUNC(sei_display_orientation)
              (ctx, rw, &current->payload.display_orientation));
        break;
    default:
        {
            // Unknown payload type: preserve it verbatim as bytes.
            allocate(current->payload.other.data, current->payload_size);
            for (i = 0; i < current->payload_size; i++)
                xu(8, payload_byte, current->payload.other.data[i], 0, 255);
        }
    }

    // Payload bit-alignment: a one bit then zeros to the byte boundary.
    if (byte_alignment(rw)) {
        av_unused int one = 1, zero = 0;
        xu(1, bit_equal_to_one, one, 1, 1);
        while (byte_alignment(rw))
            xu(1, bit_equal_to_zero, zero, 0, 0);
    }

#ifdef READ
    end_position = get_bits_count(rw);
    if (end_position < start_position + 8 * current->payload_size) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Incorrect SEI payload length: "
               "header %"PRIu32" bits, actually %d bits.\n",
               8 * current->payload_size,
               end_position - start_position);
        return AVERROR_INVALIDDATA;
    }
#else
    end_position = put_bits_count(rw);
    current->payload_size = (end_position - start_position) / 8;
#endif

    return 0;
}
// Read/write a SEI NAL unit (H.264 section 7.3.2.3).  The payload type
// and size use the 0xff-extension byte encoding (each 0xff byte adds 255
// to the value).  Reading iterates until the RBSP is exhausted, bounded
// by H264_MAX_SEI_PAYLOADS; writing may emit each payload twice, first to
// discover its size, then for real.
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawSEI *current)
{
    int err, k;

    HEADER("Supplemental Enhancement Information");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SEI));

#ifdef READ
    for (k = 0; k < H264_MAX_SEI_PAYLOADS; k++) {
        uint32_t payload_type = 0;
        uint32_t payload_size = 0;
        uint32_t tmp;

        // Accumulate 0xff extension bytes, then the final byte (0..254).
        while (show_bits(rw, 8) == 0xff) {
            xu(8, ff_byte, tmp, 0xff, 0xff);
            payload_type += 255;
        }
        xu(8, last_payload_type_byte, tmp, 0, 254);
        payload_type += tmp;

        while (show_bits(rw, 8) == 0xff) {
            xu(8, ff_byte, tmp, 0xff, 0xff);
            payload_size += 255;
        }
        xu(8, last_payload_size_byte, tmp, 0, 254);
        payload_size += tmp;

        current->payload[k].payload_type = payload_type;
        current->payload[k].payload_size = payload_size;

        CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));

        if (!cbs_h2645_read_more_rbsp_data(rw))
            break;
    }
    if (k >= H264_MAX_SEI_PAYLOADS) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
               "SEI message: found %d.\n", k);
        return AVERROR_INVALIDDATA;
    }
    current->payload_count = k + 1;
#else
    for (k = 0; k < current->payload_count; k++) {
        PutBitContext start_state;
        uint32_t tmp;
        int need_size, i;

        // Somewhat clumsy: we write the payload twice when
        // we don't know the size in advance.  This will mess
        // with trace output, but is otherwise harmless.
        start_state = *rw;
        need_size = !current->payload[k].payload_size;
        for (i = 0; i < 1 + need_size; i++) {
            // Rewind for the second pass; sei_payload() filled in
            // payload_size at the end of the first pass.
            *rw = start_state;
            tmp = current->payload[k].payload_type;
            while (tmp >= 255) {
                xu(8, ff_byte, 0xff, 0xff, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_type_byte, tmp, 0, 254);

            tmp = current->payload[k].payload_size;
            while (tmp >= 255) {
                xu(8, ff_byte, 0xff, 0xff, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_size_byte, tmp, 0, 254);

            CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
        }
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write an access unit delimiter (H.264 section 7.3.2.4).
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
                     H264RawAUD *current)
{
    int err;

    HEADER("Access Unit Delimiter");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_AUD));

    u(3, primary_pic_type, 0, 7);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
// Read/write ref_pic_list_modification() (H.264 section 7.3.3.1) for
// list 0 (all non-I/SI slices) and list 1 (B slices).  Each modification
// list is a sequence of (idc, argument) pairs terminated by idc == 3.
// Slice types: %5 gives 0=P, 1=B, 2=I, 3=SP, 4=SI.
static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw,
                                           H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i, mopn;

    // List 0 applies to everything except I (2) and SI (4) slices.
    if (current->slice_type % 5 != 2 &&
        current->slice_type % 5 != 4) {
        flag(ref_pic_list_modification_flag_l0);
        if (current->ref_pic_list_modification_flag_l0) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3);

                mopn = current->rplm_l0[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    // Short-term reference: difference of picture numbers;
                    // field coding doubles the range.
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l0[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                           (1 << (sps->log2_max_frame_num_minus4 + 4)));
                else if (mopn == 2)
                    // Long-term reference.
                    xue(long_term_pic_num,
                        current->rplm_l0[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1);
            }
        }
    }

    // List 1 only exists for B slices.
    if (current->slice_type % 5 == 1) {
        flag(ref_pic_list_modification_flag_l1);
        if (current->ref_pic_list_modification_flag_l1) {
            for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
                xue(modification_of_pic_nums_idc,
                    current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3);

                mopn = current->rplm_l1[i].modification_of_pic_nums_idc;
                if (mopn == 3)
                    break;

                if (mopn == 0 || mopn == 1)
                    xue(abs_diff_pic_num_minus1,
                        current->rplm_l1[i].abs_diff_pic_num_minus1,
                        0, (1 + current->field_pic_flag) *
                           (1 << (sps->log2_max_frame_num_minus4 + 4)));
                else if (mopn == 2)
                    xue(long_term_pic_num,
                        current->rplm_l1[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1);
            }
        }
    }

    return 0;
}
// Read/write pred_weight_table() (H.264 section 7.3.3.2): explicit
// weighted-prediction weights/offsets per reference index, for list 0
// always and list 1 for B slices.  Chroma entries are skipped for
// monochrome or separate-colour-plane streams.
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int chroma;
    int err, i, j;

    ue(luma_log2_weight_denom, 0, 7);

    // Chroma weights exist only when chroma is coded jointly with luma.
    chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0;
    if (chroma)
        ue(chroma_log2_weight_denom, 0, 7);

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        flag(luma_weight_l0_flag[i]);
        if (current->luma_weight_l0_flag[i]) {
            se(luma_weight_l0[i], -128, +127);
            se(luma_offset_l0[i], -128, +127);
        }
        if (chroma) {
            flag(chroma_weight_l0_flag[i]);
            if (current->chroma_weight_l0_flag[i]) {
                for (j = 0; j < 2; j++) {
                    // j = 0: Cb, j = 1: Cr.
                    se(chroma_weight_l0[i][j], -128, +127);
                    se(chroma_offset_l0[i][j], -128, +127);
                }
            }
        }
    }

    // B slices (slice_type % 5 == 1) also carry list-1 weights.
    if (current->slice_type % 5 == 1) {
        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            flag(luma_weight_l1_flag[i]);
            if (current->luma_weight_l1_flag[i]) {
                se(luma_weight_l1[i], -128, +127);
                se(luma_offset_l1[i], -128, +127);
            }
            if (chroma) {
                flag(chroma_weight_l1_flag[i]);
                if (current->chroma_weight_l1_flag[i]) {
                    for (j = 0; j < 2; j++) {
                        se(chroma_weight_l1[i][j], -128, +127);
                        se(chroma_offset_l1[i][j], -128, +127);
                    }
                }
            }
        }
    }

    return 0;
}
// Read/write dec_ref_pic_marking() (H.264 section 7.3.3.3).  IDR pictures
// carry two simple flags; other pictures optionally carry a list of
// memory-management control operations (MMCOs) terminated by mmco == 0.
static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H264RawSliceHeader *current, int idr_pic_flag)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps = h264->active_sps;
    int err, i;
    uint32_t mmco;

    if (idr_pic_flag) {
        flag(no_output_of_prior_pics_flag);
        flag(long_term_reference_flag);
    } else {
        flag(adaptive_ref_pic_marking_mode_flag);
        if (current->adaptive_ref_pic_marking_mode_flag) {
            for (i = 0; i < H264_MAX_MMCO_COUNT; i++) {
                xue(memory_management_control_operation,
                    current->mmco[i].memory_management_control_operation,
                    0, 6);

                mmco = current->mmco[i].memory_management_control_operation;
                if (mmco == 0)
                    break;

                // Each operation carries its own argument(s); mmco 3
                // carries two (difference_of_pic_nums_minus1 and
                // long_term_frame_idx), hence the overlapping conditions.
                if (mmco == 1 || mmco == 3)
                    xue(difference_of_pic_nums_minus1,
                        current->mmco[i].difference_of_pic_nums_minus1,
                        0, INT32_MAX);
                if (mmco == 2)
                    xue(long_term_pic_num,
                        current->mmco[i].long_term_pic_num,
                        0, sps->max_num_ref_frames - 1);
                if (mmco == 3 || mmco == 6)
                    xue(long_term_frame_idx,
                        current->mmco[i].long_term_frame_idx,
                        0, sps->max_num_ref_frames - 1);
                if (mmco == 4)
                    xue(max_long_term_frame_idx_plus1,
                        current->mmco[i].max_long_term_frame_idx_plus1,
                        0, sps->max_num_ref_frames);
            }
            // Loop exhausted without hitting the mmco == 0 terminator.
            if (i == H264_MAX_MMCO_COUNT) {
                av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many "
                       "memory management control operations.\n");
                return AVERROR_INVALIDDATA;
            }
        }
    }

    return 0;
}
/* Read or write a slice header (H.264 section 7.3.3).  Resolves and
 * activates the referenced PPS/SPS on h264->active_pps / active_sps as
 * a side effect, and records the last primary slice NAL unit type so a
 * following auxiliary slice can be associated with it.
 * NOTE(review): u()/ue()/se()/flag()/infer()/CHECK() are CBS
 * read-or-write macros using ctx/rw/current/err implicitly; the
 * statement order below is the exact bitstream element order. */
static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw,
                              H264RawSliceHeader *current)
{
    CodedBitstreamH264Context *h264 = ctx->priv_data;
    const H264RawSPS *sps;
    const H264RawPPS *pps;
    int err;
    int idr_pic_flag;
    int slice_type_i, slice_type_p, slice_type_b;
    int slice_type_si, slice_type_sp;

    HEADER("Slice Header");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                1 << H264_NAL_SLICE |
                                1 << H264_NAL_IDR_SLICE |
                                1 << H264_NAL_AUXILIARY_SLICE));

    if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) {
        /* An auxiliary slice depends on the primary slice of the same
         * access unit; reject it if no primary slice has been seen. */
        if (!h264->last_slice_nal_unit_type) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice "
                   "is not decodable without the main picture "
                   "in the same access unit.\n");
            return AVERROR_INVALIDDATA;
        }
    } else {
        h264->last_slice_nal_unit_type =
            current->nal_unit_header.nal_unit_type;
    }
    /* For auxiliary slices this inherits the IDR-ness of the primary
     * slice recorded above. */
    idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE;

    ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1);
    ue(slice_type, 0, 9);

    /* slice_type values 5-9 mean "same type for all slices of the
     * picture"; % 5 maps them onto the base type. */
    slice_type_i  = current->slice_type % 5 == 2;
    slice_type_p  = current->slice_type % 5 == 0;
    slice_type_b  = current->slice_type % 5 == 1;
    slice_type_si = current->slice_type % 5 == 4;
    slice_type_sp = current->slice_type % 5 == 3;

    if (idr_pic_flag && !(slice_type_i || slice_type_si)) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d "
               "for IDR picture.\n", current->slice_type);
        return AVERROR_INVALIDDATA;
    }

    /* Activate the referenced PPS, then the SPS it points at; both are
     * needed to interpret the rest of the header. */
    ue(pic_parameter_set_id, 0, 255);
    pps = h264->pps[current->pic_parameter_set_id];
    if (!pps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
               current->pic_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_pps = pps;

    sps = h264->sps[pps->seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               pps->seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h264->active_sps = sps;

    if (sps->separate_colour_plane_flag)
        u(2, colour_plane_id, 0, 2);

    /* frame_num is coded with an SPS-defined bit width. */
    u(sps->log2_max_frame_num_minus4 + 4, frame_num,
      0, (1 << (sps->log2_max_frame_num_minus4 + 4)) - 1);

    if (!sps->frame_mbs_only_flag) {
        flag(field_pic_flag);
        if (current->field_pic_flag)
            flag(bottom_field_flag);
        else
            infer(bottom_field_flag, 0);
    } else {
        infer(field_pic_flag, 0);
        infer(bottom_field_flag, 0);
    }

    if (idr_pic_flag)
        ue(idr_pic_id, 0, 65535);

    /* POC information; which elements are present depends on the SPS
     * pic_order_cnt_type and PPS/field flags. */
    if (sps->pic_order_cnt_type == 0) {
        u(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb,
          0, (1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)) - 1);
        if (pps->bottom_field_pic_order_in_frame_present_flag &&
            !current->field_pic_flag)
            se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX);

    } else if (sps->pic_order_cnt_type == 1) {
        if (!sps->delta_pic_order_always_zero_flag) {
            se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX);
            if (pps->bottom_field_pic_order_in_frame_present_flag &&
                !current->field_pic_flag)
                se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX);
            else
                infer(delta_pic_order_cnt[1], 0);
        } else {
            infer(delta_pic_order_cnt[0], 0);
            infer(delta_pic_order_cnt[1], 0);
        }
    }

    if (pps->redundant_pic_cnt_present_flag)
        ue(redundant_pic_cnt, 0, 127);

    if (slice_type_b)
        flag(direct_spatial_mv_pred_flag);

    /* Active reference counts: explicit override or defaults from the PPS. */
    if (slice_type_p || slice_type_sp || slice_type_b) {
        flag(num_ref_idx_active_override_flag);
        if (current->num_ref_idx_active_override_flag) {
            ue(num_ref_idx_l0_active_minus1, 0, 31);
            if (slice_type_b)
                ue(num_ref_idx_l1_active_minus1, 0, 31);
        } else {
            infer(num_ref_idx_l0_active_minus1,
                  pps->num_ref_idx_l0_default_active_minus1);
            infer(num_ref_idx_l1_active_minus1,
                  pps->num_ref_idx_l1_default_active_minus1);
        }
    }

    /* NAL types 20/21 (coded slice extension) would need the MVC/3D-AVC
     * variant of the reference list modification syntax. */
    if (current->nal_unit_header.nal_unit_type == 20 ||
        current->nal_unit_header.nal_unit_type == 21) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n");
        return AVERROR_PATCHWELCOME;
    } else {
        CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current));
    }

    /* Explicit weighted prediction: always for weighted P/SP slices,
     * and for B slices only when weighted_bipred_idc == 1 (explicit). */
    if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) ||
        (pps->weighted_bipred_idc == 1 && slice_type_b)) {
        CHECK(FUNC(pred_weight_table)(ctx, rw, current));
    }

    /* Reference picture marking is present only for reference slices
     * (nal_ref_idc != 0). */
    if (current->nal_unit_header.nal_ref_idc != 0) {
        CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag));
    }

    if (pps->entropy_coding_mode_flag &&
        !slice_type_i && !slice_type_si) {
        ue(cabac_init_idc, 0, 2);
    }

    /* QP delta range widens with luma bit depth. */
    se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8,
                       + 51 + 6 * sps->bit_depth_luma_minus8);

    if (slice_type_sp || slice_type_si) {
        if (slice_type_sp)
            flag(sp_for_switch_flag);
        se(slice_qs_delta, -51, +51);
    }

    /* Deblocking filter control: explicit offsets unless the filter is
     * disabled (idc == 1), otherwise inferred zeros. */
    if (pps->deblocking_filter_control_present_flag) {
        ue(disable_deblocking_filter_idc, 0, 2);
        if (current->disable_deblocking_filter_idc != 1) {
            se(slice_alpha_c0_offset_div2, -6, +6);
            se(slice_beta_offset_div2,     -6, +6);
        } else {
            infer(slice_alpha_c0_offset_div2, 0);
            infer(slice_beta_offset_div2,     0);
        }
    } else {
        infer(disable_deblocking_filter_idc, 0);
        infer(slice_alpha_c0_offset_div2,    0);
        infer(slice_beta_offset_div2,        0);
    }

    /* slice_group_change_cycle is coded with just enough bits to hold
     * its maximum value, derived from picture size and change rate. */
    if (pps->num_slice_groups_minus1 > 0 &&
        pps->slice_group_map_type >= 3 &&
        pps->slice_group_map_type <= 5) {
        unsigned int pic_size, max, bits;

        pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
                   (sps->pic_height_in_map_units_minus1 + 1);
        max = (pic_size + pps->slice_group_change_rate_minus1) /
              (pps->slice_group_change_rate_minus1 + 1);
        bits = av_log2(2 * max - 1);

        u(bits, slice_group_change_cycle, 0, max);
    }

    /* CABAC slice data must start byte-aligned: pad with one-bits. */
    if (pps->entropy_coding_mode_flag) {
        av_unused int one = 1;
        while (byte_alignment(rw))
            xu(1, cabac_alignment_one_bit, one, 1, 1);
    }

    return 0;
}