/*
 * HEVC Parameter Set decoding
 *
 * Copyright (C) 2012 - 2013 Guillaume Martres
 * Copyright (C) 2012 - 2013 Mickael Raulet
 * Copyright (C) 2012 - 2013 Gildas Cocherel
 * Copyright (C) 2013 Vittorio Giovara
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/imgutils.h"

#include "golomb.h"
#include "hevc.h"

static const uint8_t default_scaling_list_intra[] = {
    16, 16, 16, 16, 17, 18, 21, 24,
    16, 16, 16, 16, 17, 19, 22, 25,
    16, 16, 17, 18, 20, 22, 25, 29,
    16, 16, 18, 21, 24, 27, 31, 36,
    17, 17, 20, 24, 30, 35, 41, 47,
    18, 19, 22, 27, 35, 44, 54, 65,
    21, 22, 25, 31, 41, 54, 70, 88,
    24, 25, 29, 36, 47, 65, 88, 115
};

static const uint8_t default_scaling_list_inter[] = {
    16, 16, 16, 16, 17, 18, 20, 24,
    16, 16, 16, 17, 18, 20, 24, 25,
    16, 16, 17, 18, 20, 24, 25, 28,
    16, 17, 18, 20, 24, 25, 28, 33,
    17, 18, 20, 24, 25, 28, 33, 41,
    18, 20, 24, 25, 28, 33, 41, 54,
    20, 24, 25, 28, 33, 41, 54, 71,
    24, 25, 28, 33, 41, 54, 71, 91
};

static const AVRational vui_sar[] = {
    {   0,  1 },
    {   1,  1 },
    {  12, 11 },
    {  10, 11 },
    {  16, 11 },
    {  40, 33 },
    {  24, 11 },
    {  20, 11 },
    {  32, 11 },
    {  80, 33 },
    {  18, 11 },
    {  15, 11 },
    {  64, 33 },
    { 160, 99 },
    {   4,  3 },
    {   3,  2 },
    {   2,  1 },
};
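
/* Parse an st_ref_pic_set() syntax structure, either from the SPS or from a
 * slice header.  The RPS is either predicted from a previously decoded set
 * or coded explicitly as lists of negative and positive POC deltas. */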
int ff_hevc_decode_short_term_rps(HEVCContext *s, ShortTermRPS *rps,
                                  const HEVCSPS *sps, int is_slice_header)
{
    HEVCLocalContext *lc = &s->HEVClc;
    uint8_t rps_predict = 0;
    int delta_poc;
    int k0 = 0;
    int k1 = 0;
    int k  = 0;
    int i;

    GetBitContext *gb = &lc->gb;

    if (rps != sps->st_rps && sps->nb_st_rps)
        rps_predict = get_bits1(gb);

    if (rps_predict) {
        const ShortTermRPS *rps_ridx;
        int delta_rps, abs_delta_rps;
        uint8_t use_delta_flag = 0;
        uint8_t delta_rps_sign;

        if (is_slice_header) {
            int delta_idx = get_ue_golomb_long(gb) + 1;
            if (delta_idx > sps->nb_st_rps) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "Invalid value of delta_idx in slice header RPS: %d > %d.\n",
                       delta_idx, sps->nb_st_rps);
                return AVERROR_INVALIDDATA;
            }
            rps_ridx = &sps->st_rps[sps->nb_st_rps - delta_idx];
        } else
            rps_ridx = &sps->st_rps[rps - sps->st_rps - 1];

        delta_rps_sign = get_bits1(gb);
        abs_delta_rps  = get_ue_golomb_long(gb) + 1;
        delta_rps      = (1 - (delta_rps_sign << 1)) * abs_delta_rps;
        for (i = 0; i <= rps_ridx->num_delta_pocs; i++) {
            int used = rps->used[k] = get_bits1(gb);

            if (!used)
                use_delta_flag = get_bits1(gb);

            if (used || use_delta_flag) {
                if (i < rps_ridx->num_delta_pocs)
                    delta_poc = delta_rps + rps_ridx->delta_poc[i];
                else
                    delta_poc = delta_rps;
                rps->delta_poc[k] = delta_poc;
                if (delta_poc < 0)
                    k0++;
                else
                    k1++;
                k++;
            }
        }

        rps->num_delta_pocs    = k;
        rps->num_negative_pics = k0;
        // sort in increasing order (smallest first)
        if (rps->num_delta_pocs != 0) {
            int used, tmp;
            for (i = 1; i < rps->num_delta_pocs; i++) {
                delta_poc = rps->delta_poc[i];
                used      = rps->used[i];
                for (k = i - 1; k >= 0; k--) {
                    tmp = rps->delta_poc[k];
                    if (delta_poc < tmp) {
                        rps->delta_poc[k + 1] = tmp;
                        rps->used[k + 1]      = rps->used[k];
                        rps->delta_poc[k]     = delta_poc;
                        rps->used[k]          = used;
                    }
                }
            }
        }
        if ((rps->num_negative_pics >> 1) != 0) {
            int used;
            k = rps->num_negative_pics - 1;
            // flip the negative values to largest first
            for (i = 0; i < rps->num_negative_pics >> 1; i++) {
                delta_poc         = rps->delta_poc[i];
                used              = rps->used[i];
                rps->delta_poc[i] = rps->delta_poc[k];
                rps->used[i]      = rps->used[k];
                rps->delta_poc[k] = delta_poc;
                rps->used[k]      = used;
                k--;
            }
        }
    } else {
        unsigned int prev, nb_positive_pics;
        rps->num_negative_pics = get_ue_golomb_long(gb);
        nb_positive_pics       = get_ue_golomb_long(gb);

        if (rps->num_negative_pics >= MAX_REFS ||
            nb_positive_pics >= MAX_REFS) {
            av_log(s->avctx, AV_LOG_ERROR, "Too many refs in a short term RPS.\n");
            return AVERROR_INVALIDDATA;
        }

        rps->num_delta_pocs = rps->num_negative_pics + nb_positive_pics;
        if (rps->num_delta_pocs) {
            prev = 0;
            for (i = 0; i < rps->num_negative_pics; i++) {
                delta_poc = get_ue_golomb_long(gb) + 1;
                prev -= delta_poc;
                rps->delta_poc[i] = prev;
                rps->used[i]      = get_bits1(gb);
            }
            prev = 0;
            for (i = 0; i < nb_positive_pics; i++) {
                delta_poc = get_ue_golomb_long(gb) + 1;
                prev += delta_poc;
                rps->delta_poc[rps->num_negative_pics + i] = prev;
                rps->used[rps->num_negative_pics + i]      = get_bits1(gb);
            }
        }
    }
    return 0;
}
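
/* Parse a profile_tier_level() syntax structure: the general profile, tier
 * and level, the 32 profile compatibility flags, and the per-sub-layer
 * profile/level information when present. */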
static int decode_profile_tier_level(HEVCLocalContext *lc, PTL *ptl,
                                     int max_num_sub_layers)
{
    int i, j;
    GetBitContext *gb = &lc->gb;

    ptl->general_profile_space = get_bits(gb, 2);
    ptl->general_tier_flag     = get_bits1(gb);
    ptl->general_profile_idc   = get_bits(gb, 5);
    for (i = 0; i < 32; i++)
        ptl->general_profile_compatibility_flag[i] = get_bits1(gb);
    skip_bits1(gb); // general_progressive_source_flag
    skip_bits1(gb); // general_interlaced_source_flag
    skip_bits1(gb); // general_non_packed_constraint_flag
    skip_bits1(gb); // general_frame_only_constraint_flag
    if (get_bits(gb, 16) != 0) // XXX_reserved_zero_44bits[0..15]
        return -1;
    if (get_bits(gb, 16) != 0) // XXX_reserved_zero_44bits[16..31]
        return -1;
    if (get_bits(gb, 12) != 0) // XXX_reserved_zero_44bits[32..43]
        return -1;

    ptl->general_level_idc = get_bits(gb, 8);

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        ptl->sub_layer_profile_present_flag[i] = get_bits1(gb);
        ptl->sub_layer_level_present_flag[i]   = get_bits1(gb);
    }
    if (max_num_sub_layers - 1 > 0)
        for (i = max_num_sub_layers - 1; i < 8; i++)
            skip_bits(gb, 2); // reserved_zero_2bits[i]
    for (i = 0; i < max_num_sub_layers - 1; i++) {
        if (ptl->sub_layer_profile_present_flag[i]) {
            ptl->sub_layer_profile_space[i] = get_bits(gb, 2);
            ptl->sub_layer_tier_flag[i]     = get_bits(gb, 1);
            ptl->sub_layer_profile_idc[i]   = get_bits(gb, 5);
            for (j = 0; j < 32; j++)
                ptl->sub_layer_profile_compatibility_flags[i][j] = get_bits1(gb);
            skip_bits1(gb); // sub_layer_progressive_source_flag
            skip_bits1(gb); // sub_layer_interlaced_source_flag
            skip_bits1(gb); // sub_layer_non_packed_constraint_flag
            skip_bits1(gb); // sub_layer_frame_only_constraint_flag

            if (get_bits(gb, 16) != 0) // sub_layer_reserved_zero_44bits[0..15]
                return -1;
            if (get_bits(gb, 16) != 0) // sub_layer_reserved_zero_44bits[16..31]
                return -1;
            if (get_bits(gb, 12) != 0) // sub_layer_reserved_zero_44bits[32..43]
                return -1;
        }
        if (ptl->sub_layer_level_present_flag[i])
            ptl->sub_layer_level_idc[i] = get_bits(gb, 8);
    }

    return 0;
}
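
/* HRD (hypothetical reference decoder) parameters are parsed only to keep
 * the bitstream reader in sync; none of the values are stored. */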
static void decode_sublayer_hrd(HEVCContext *s, int nb_cpb,
                                int subpic_params_present)
{
    GetBitContext *gb = &s->HEVClc.gb;
    int i;

    for (i = 0; i < nb_cpb; i++) {
        get_ue_golomb_long(gb); // bit_rate_value_minus1
        get_ue_golomb_long(gb); // cpb_size_value_minus1

        if (subpic_params_present) {
            get_ue_golomb_long(gb); // cpb_size_du_value_minus1
            get_ue_golomb_long(gb); // bit_rate_du_value_minus1
        }
        skip_bits1(gb); // cbr_flag
    }
}

static void decode_hrd(HEVCContext *s, int common_inf_present,
                       int max_sublayers)
{
    GetBitContext *gb = &s->HEVClc.gb;
    int nal_params_present = 0, vcl_params_present = 0;
    int subpic_params_present = 0;
    int i;

    if (common_inf_present) {
        nal_params_present = get_bits1(gb);
        vcl_params_present = get_bits1(gb);

        if (nal_params_present || vcl_params_present) {
            subpic_params_present = get_bits1(gb);

            if (subpic_params_present) {
                skip_bits(gb, 8); // tick_divisor_minus2
                skip_bits(gb, 5); // du_cpb_removal_delay_increment_length_minus1
                skip_bits(gb, 1); // sub_pic_cpb_params_in_pic_timing_sei_flag
                skip_bits(gb, 5); // dpb_output_delay_du_length_minus1
            }

            skip_bits(gb, 4); // bit_rate_scale
            skip_bits(gb, 4); // cpb_size_scale

            if (subpic_params_present)
                skip_bits(gb, 4); // cpb_size_du_scale

            skip_bits(gb, 5); // initial_cpb_removal_delay_length_minus1
            skip_bits(gb, 5); // au_cpb_removal_delay_length_minus1
            skip_bits(gb, 5); // dpb_output_delay_length_minus1
        }
    }

    for (i = 0; i < max_sublayers; i++) {
        int low_delay = 0;
        int nb_cpb = 1;
        int fixed_rate = get_bits1(gb);

        if (!fixed_rate)
            fixed_rate = get_bits1(gb);

        if (fixed_rate)
            get_ue_golomb_long(gb); // elemental_duration_in_tc_minus1
        else
            low_delay = get_bits1(gb);

        if (!low_delay)
            nb_cpb = get_ue_golomb_long(gb) + 1;

        if (nal_params_present)
            decode_sublayer_hrd(s, nb_cpb, subpic_params_present);
        if (vcl_params_present)
            decode_sublayer_hrd(s, nb_cpb, subpic_params_present);
    }
}
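
/* Parse a video parameter set NAL unit and store it in s->vps_list. */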
int ff_hevc_decode_nal_vps(HEVCContext *s)
{
    int i, j;
    GetBitContext *gb = &s->HEVClc.gb;
    int vps_id = 0;
    HEVCVPS *vps;

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding VPS\n");

    vps = av_mallocz(sizeof(*vps));
    if (!vps)
        return AVERROR(ENOMEM);

    vps_id = get_bits(gb, 4);
    if (vps_id >= MAX_VPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", vps_id);
        goto err;
    }

    if (get_bits(gb, 2) != 3) { // vps_reserved_three_2bits
        av_log(s->avctx, AV_LOG_ERROR, "vps_reserved_three_2bits is not three\n");
        goto err;
    }

    vps->vps_max_layers               = get_bits(gb, 6) + 1;
    vps->vps_max_sub_layers           = get_bits(gb, 3) + 1;
    vps->vps_temporal_id_nesting_flag = get_bits1(gb);

    if (get_bits(gb, 16) != 0xffff) { // vps_reserved_ffff_16bits
        av_log(s->avctx, AV_LOG_ERROR, "vps_reserved_ffff_16bits is not 0xffff\n");
        goto err;
    }

    if (vps->vps_max_sub_layers > MAX_SUB_LAYERS) {
        av_log(s->avctx, AV_LOG_ERROR, "vps_max_sub_layers out of range: %d\n",
               vps->vps_max_sub_layers);
        goto err;
    }

    if (decode_profile_tier_level(&s->HEVClc, &vps->ptl, vps->vps_max_sub_layers) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "Error decoding profile tier level.\n");
        goto err;
    }

    vps->vps_sub_layer_ordering_info_present_flag = get_bits1(gb);

    i = vps->vps_sub_layer_ordering_info_present_flag ? 0 : vps->vps_max_sub_layers - 1;
    for (; i < vps->vps_max_sub_layers; i++) {
        vps->vps_max_dec_pic_buffering[i] = get_ue_golomb_long(gb) + 1;
        vps->vps_num_reorder_pics[i]      = get_ue_golomb_long(gb);
        vps->vps_max_latency_increase[i]  = get_ue_golomb_long(gb) - 1;

        if (vps->vps_max_dec_pic_buffering[i] > MAX_DPB_SIZE) {
            av_log(s->avctx, AV_LOG_ERROR, "vps_max_dec_pic_buffering_minus1 out of range: %d\n",
                   vps->vps_max_dec_pic_buffering[i] - 1);
            goto err;
        }
        if (vps->vps_num_reorder_pics[i] > vps->vps_max_dec_pic_buffering[i] - 1) {
            av_log(s->avctx, AV_LOG_ERROR, "vps_max_num_reorder_pics out of range: %d\n",
                   vps->vps_num_reorder_pics[i]);
            goto err;
        }
    }

    vps->vps_max_layer_id   = get_bits(gb, 6);
    vps->vps_num_layer_sets = get_ue_golomb_long(gb) + 1;
    for (i = 1; i < vps->vps_num_layer_sets; i++)
        for (j = 0; j <= vps->vps_max_layer_id; j++)
            skip_bits(gb, 1); // layer_id_included_flag[i][j]

    vps->vps_timing_info_present_flag = get_bits1(gb);
    if (vps->vps_timing_info_present_flag) {
        vps->vps_num_units_in_tick               = get_bits_long(gb, 32);
        vps->vps_time_scale                      = get_bits_long(gb, 32);
        vps->vps_poc_proportional_to_timing_flag = get_bits1(gb);
        if (vps->vps_poc_proportional_to_timing_flag)
            vps->vps_num_ticks_poc_diff_one = get_ue_golomb_long(gb) + 1;
        vps->vps_num_hrd_parameters = get_ue_golomb_long(gb);
        for (i = 0; i < vps->vps_num_hrd_parameters; i++) {
            int common_inf_present = 1;

            get_ue_golomb_long(gb); // hrd_layer_set_idx
            if (i)
                common_inf_present = get_bits1(gb);
            decode_hrd(s, common_inf_present, vps->vps_max_sub_layers);
        }
    }
    get_bits1(gb); /* vps_extension_flag */

    av_free(s->vps_list[vps_id]);
    s->vps_list[vps_id] = vps;
    return 0;

err:
    av_free(vps);
    return AVERROR_INVALIDDATA;
}
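
/* Parse the vui_parameters() of an SPS: sample aspect ratio, video signal
 * description, default display window, timing and bitstream restrictions. */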
static void decode_vui(HEVCContext *s, HEVCSPS *sps)
{
    VUI *vui          = &sps->vui;
    GetBitContext *gb = &s->HEVClc.gb;
    int sar_present;

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding VUI\n");

    sar_present = get_bits1(gb);
    if (sar_present) {
        uint8_t sar_idx = get_bits(gb, 8);
        if (sar_idx < FF_ARRAY_ELEMS(vui_sar))
            vui->sar = vui_sar[sar_idx];
        else if (sar_idx == 255) {
            vui->sar.num = get_bits(gb, 16);
            vui->sar.den = get_bits(gb, 16);
        } else
            av_log(s->avctx, AV_LOG_WARNING,
                   "Unknown SAR index: %u.\n", sar_idx);
    }

    vui->overscan_info_present_flag = get_bits1(gb);
    if (vui->overscan_info_present_flag)
        vui->overscan_appropriate_flag = get_bits1(gb);

    vui->video_signal_type_present_flag = get_bits1(gb);
    if (vui->video_signal_type_present_flag) {
        vui->video_format                    = get_bits(gb, 3);
        vui->video_full_range_flag           = get_bits1(gb);
        vui->colour_description_present_flag = get_bits1(gb);
        if (vui->colour_description_present_flag) {
            vui->colour_primaries        = get_bits(gb, 8);
            vui->transfer_characteristic = get_bits(gb, 8);
            vui->matrix_coeffs           = get_bits(gb, 8);
        }
    }

    vui->chroma_loc_info_present_flag = get_bits1(gb);
    if (vui->chroma_loc_info_present_flag) {
        vui->chroma_sample_loc_type_top_field    = get_ue_golomb_long(gb);
        vui->chroma_sample_loc_type_bottom_field = get_ue_golomb_long(gb);
    }

    vui->neutra_chroma_indication_flag = get_bits1(gb);
    vui->field_seq_flag                = get_bits1(gb);
    vui->frame_field_info_present_flag = get_bits1(gb);

    vui->default_display_window_flag = get_bits1(gb);
    if (vui->default_display_window_flag) {
        //TODO: * 2 is only valid for 420
        vui->def_disp_win.left_offset   = get_ue_golomb_long(gb) * 2;
        vui->def_disp_win.right_offset  = get_ue_golomb_long(gb) * 2;
        vui->def_disp_win.top_offset    = get_ue_golomb_long(gb) * 2;
        vui->def_disp_win.bottom_offset = get_ue_golomb_long(gb) * 2;

        if (s->apply_defdispwin &&
            s->avctx->flags2 & CODEC_FLAG2_IGNORE_CROP) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "discarding vui default display window, "
                   "original values are l:%u r:%u t:%u b:%u\n",
                   vui->def_disp_win.left_offset,
                   vui->def_disp_win.right_offset,
                   vui->def_disp_win.top_offset,
                   vui->def_disp_win.bottom_offset);

            vui->def_disp_win.left_offset   =
            vui->def_disp_win.right_offset  =
            vui->def_disp_win.top_offset    =
            vui->def_disp_win.bottom_offset = 0;
        }
    }

    vui->vui_timing_info_present_flag = get_bits1(gb);
    if (vui->vui_timing_info_present_flag) {
        // use get_bits_long(): plain get_bits() cannot read a full 32 bits
        vui->vui_num_units_in_tick               = get_bits_long(gb, 32);
        vui->vui_time_scale                      = get_bits_long(gb, 32);
        vui->vui_poc_proportional_to_timing_flag = get_bits1(gb);
        if (vui->vui_poc_proportional_to_timing_flag)
            vui->vui_num_ticks_poc_diff_one_minus1 = get_ue_golomb_long(gb);
        vui->vui_hrd_parameters_present_flag = get_bits1(gb);
        if (vui->vui_hrd_parameters_present_flag)
            decode_hrd(s, 1, sps->max_sub_layers);
    }

    vui->bitstream_restriction_flag = get_bits1(gb);
    if (vui->bitstream_restriction_flag) {
        vui->tiles_fixed_structure_flag              = get_bits1(gb);
        vui->motion_vectors_over_pic_boundaries_flag = get_bits1(gb);
        vui->restricted_ref_pic_lists_flag           = get_bits1(gb);
        vui->min_spatial_segmentation_idc            = get_ue_golomb_long(gb);
        vui->max_bytes_per_pic_denom                 = get_ue_golomb_long(gb);
        vui->max_bits_per_min_cu_denom               = get_ue_golomb_long(gb);
        vui->log2_max_mv_length_horizontal           = get_ue_golomb_long(gb);
        vui->log2_max_mv_length_vertical             = get_ue_golomb_long(gb);
    }
}
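
/* Fill a ScalingList with the default matrices defined by the spec:
 * flat 16s for 4x4 and the intra/inter default lists for the larger sizes. */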
static void set_default_scaling_list_data(ScalingList *sl)
{
    int matrixId;

    for (matrixId = 0; matrixId < 6; matrixId++) {
        // 4x4 default is 16
        memset(sl->sl[0][matrixId], 16, 16);
        sl->sl_dc[0][matrixId] = 16; // default for 16x16
        sl->sl_dc[1][matrixId] = 16; // default for 32x32
    }
    memcpy(sl->sl[1][0], default_scaling_list_intra, 64);
    memcpy(sl->sl[1][1], default_scaling_list_intra, 64);
    memcpy(sl->sl[1][2], default_scaling_list_intra, 64);
    memcpy(sl->sl[1][3], default_scaling_list_inter, 64);
    memcpy(sl->sl[1][4], default_scaling_list_inter, 64);
    memcpy(sl->sl[1][5], default_scaling_list_inter, 64);
    memcpy(sl->sl[2][0], default_scaling_list_intra, 64);
    memcpy(sl->sl[2][1], default_scaling_list_intra, 64);
    memcpy(sl->sl[2][2], default_scaling_list_intra, 64);
    memcpy(sl->sl[2][3], default_scaling_list_inter, 64);
    memcpy(sl->sl[2][4], default_scaling_list_inter, 64);
    memcpy(sl->sl[2][5], default_scaling_list_inter, 64);
    memcpy(sl->sl[3][0], default_scaling_list_intra, 64);
    memcpy(sl->sl[3][1], default_scaling_list_inter, 64);
}
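
/* Parse a scaling_list_data() syntax structure.  Each matrix is either
 * copied from a previously decoded one (reference delta) or coded as DPCM
 * deltas along the diagonal scan. */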
static int scaling_list_data(HEVCContext *s, ScalingList *sl)
{
    GetBitContext *gb = &s->HEVClc.gb;
    uint8_t scaling_list_pred_mode_flag[4][6];
    int32_t scaling_list_dc_coef[2][6];
    int size_id, matrix_id, i, pos, delta;

    for (size_id = 0; size_id < 4; size_id++)
        for (matrix_id = 0; matrix_id < (size_id == 3 ? 2 : 6); matrix_id++) {
            scaling_list_pred_mode_flag[size_id][matrix_id] = get_bits1(gb);
            if (!scaling_list_pred_mode_flag[size_id][matrix_id]) {
                delta = get_ue_golomb_long(gb);
                /* Only need to handle non-zero delta. Zero means default,
                 * which should already be in the arrays. */
                if (delta) {
                    // Copy from previous array.
                    if (matrix_id - delta < 0) {
                        av_log(s->avctx, AV_LOG_ERROR,
                               "Invalid delta in scaling list data: %d.\n", delta);
                        return AVERROR_INVALIDDATA;
                    }

                    memcpy(sl->sl[size_id][matrix_id],
                           sl->sl[size_id][matrix_id - delta],
                           size_id > 0 ? 64 : 16);
                    if (size_id > 1)
                        sl->sl_dc[size_id - 2][matrix_id] = sl->sl_dc[size_id - 2][matrix_id - delta];
                }
            } else {
                int next_coef, coef_num;
                int32_t scaling_list_delta_coef;

                next_coef = 8;
                coef_num  = FFMIN(64, 1 << (4 + (size_id << 1)));
                if (size_id > 1) {
                    scaling_list_dc_coef[size_id - 2][matrix_id] = get_se_golomb(gb) + 8;
                    next_coef = scaling_list_dc_coef[size_id - 2][matrix_id];
                    sl->sl_dc[size_id - 2][matrix_id] = next_coef;
                }
                for (i = 0; i < coef_num; i++) {
                    if (size_id == 0)
                        pos = 4 * ff_hevc_diag_scan4x4_y[i] +
                                  ff_hevc_diag_scan4x4_x[i];
                    else
                        pos = 8 * ff_hevc_diag_scan8x8_y[i] +
                                  ff_hevc_diag_scan8x8_x[i];

                    scaling_list_delta_coef = get_se_golomb(gb);
                    next_coef = (next_coef + scaling_list_delta_coef + 256) % 256;
                    sl->sl[size_id][matrix_id][pos] = next_coef;
                }
            }
        }

    return 0;
}
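
/* Parse a sequence parameter set NAL unit, derive the inferred values
 * (CTB/CB/TB/PU dimensions, output window) and store it in s->sps_list. */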
int ff_hevc_decode_nal_sps(HEVCContext *s)
{
    const AVPixFmtDescriptor *desc;
    GetBitContext *gb = &s->HEVClc.gb;
    int ret    = 0;
    int sps_id = 0;
    int log2_diff_max_min_transform_block_size;
    int bit_depth_chroma, start, vui_present, sublayer_ordering_info;
    int i;

    HEVCSPS *sps;
    AVBufferRef *sps_buf = av_buffer_allocz(sizeof(*sps));

    if (!sps_buf)
        return AVERROR(ENOMEM);
    sps = (HEVCSPS*)sps_buf->data;

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding SPS\n");

    // Coded parameters

    sps->vps_id = get_bits(gb, 4);
    if (sps->vps_id >= MAX_VPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", sps->vps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sps->max_sub_layers = get_bits(gb, 3) + 1;
    if (sps->max_sub_layers > MAX_SUB_LAYERS) {
        av_log(s->avctx, AV_LOG_ERROR, "sps_max_sub_layers out of range: %d\n",
               sps->max_sub_layers);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    skip_bits1(gb); // temporal_id_nesting_flag

    if (decode_profile_tier_level(&s->HEVClc, &sps->ptl,
                                  sps->max_sub_layers) < 0) {
        av_log(s->avctx, AV_LOG_ERROR, "error decoding profile tier level\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sps_id = get_ue_golomb_long(gb);
    if (sps_id >= MAX_SPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", sps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sps->chroma_format_idc = get_ue_golomb_long(gb);
    if (sps->chroma_format_idc != 1) {
        avpriv_report_missing_feature(s->avctx, "chroma_format_idc != 1\n");
        ret = AVERROR_PATCHWELCOME;
        goto err;
    }

    if (sps->chroma_format_idc == 3)
        sps->separate_colour_plane_flag = get_bits1(gb);

    sps->width  = get_ue_golomb_long(gb);
    sps->height = get_ue_golomb_long(gb);
    if ((ret = av_image_check_size(sps->width,
                                   sps->height, 0, s->avctx)) < 0)
        goto err;

    if (get_bits1(gb)) { // pic_conformance_flag
        //TODO: * 2 is only valid for 420
        sps->pic_conf_win.left_offset   = get_ue_golomb_long(gb) * 2;
        sps->pic_conf_win.right_offset  = get_ue_golomb_long(gb) * 2;
        sps->pic_conf_win.top_offset    = get_ue_golomb_long(gb) * 2;
        sps->pic_conf_win.bottom_offset = get_ue_golomb_long(gb) * 2;

        if (s->avctx->flags2 & CODEC_FLAG2_IGNORE_CROP) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "discarding sps conformance window, "
                   "original values are l:%u r:%u t:%u b:%u\n",
                   sps->pic_conf_win.left_offset,
                   sps->pic_conf_win.right_offset,
                   sps->pic_conf_win.top_offset,
                   sps->pic_conf_win.bottom_offset);

            sps->pic_conf_win.left_offset   =
            sps->pic_conf_win.right_offset  =
            sps->pic_conf_win.top_offset    =
            sps->pic_conf_win.bottom_offset = 0;
        }
        sps->output_window = sps->pic_conf_win;
    }

    sps->bit_depth   = get_ue_golomb_long(gb) + 8;
    bit_depth_chroma = get_ue_golomb_long(gb) + 8;
    if (bit_depth_chroma != sps->bit_depth) {
        av_log(s->avctx, AV_LOG_ERROR,
               "Luma bit depth (%d) is different from chroma bit depth (%d), "
               "this is unsupported.\n",
               sps->bit_depth, bit_depth_chroma);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (sps->chroma_format_idc == 1) {
        switch (sps->bit_depth) {
        case 8:  sps->pix_fmt = AV_PIX_FMT_YUV420P;   break;
        case 9:  sps->pix_fmt = AV_PIX_FMT_YUV420P9;  break;
        case 10: sps->pix_fmt = AV_PIX_FMT_YUV420P10; break;
        default:
            av_log(s->avctx, AV_LOG_ERROR, "Unsupported bit depth: %d\n",
                   sps->bit_depth);
            ret = AVERROR_PATCHWELCOME;
            goto err;
        }
    } else {
        av_log(s->avctx, AV_LOG_ERROR,
               "non-4:2:0 support is currently unspecified.\n");
        ret = AVERROR_PATCHWELCOME;
        goto err;
    }

    desc = av_pix_fmt_desc_get(sps->pix_fmt);
    if (!desc) {
        ret = AVERROR(EINVAL);
        goto err;
    }

    sps->hshift[0] = sps->vshift[0] = 0;
    sps->hshift[2] = sps->hshift[1] = desc->log2_chroma_w;
    sps->vshift[2] = sps->vshift[1] = desc->log2_chroma_h;

    sps->pixel_shift = sps->bit_depth > 8;

    sps->log2_max_poc_lsb = get_ue_golomb_long(gb) + 4;
    if (sps->log2_max_poc_lsb > 16) {
        av_log(s->avctx, AV_LOG_ERROR, "log2_max_pic_order_cnt_lsb_minus4 out of range: %d\n",
               sps->log2_max_poc_lsb - 4);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sublayer_ordering_info = get_bits1(gb);
    start = sublayer_ordering_info ? 0 : sps->max_sub_layers - 1;
    for (i = start; i < sps->max_sub_layers; i++) {
        sps->temporal_layer[i].max_dec_pic_buffering = get_ue_golomb_long(gb) + 1;
        sps->temporal_layer[i].num_reorder_pics      = get_ue_golomb_long(gb);
        sps->temporal_layer[i].max_latency_increase  = get_ue_golomb_long(gb) - 1;
        if (sps->temporal_layer[i].max_dec_pic_buffering > MAX_DPB_SIZE) {
            av_log(s->avctx, AV_LOG_ERROR, "sps_max_dec_pic_buffering_minus1 out of range: %d\n",
                   sps->temporal_layer[i].max_dec_pic_buffering - 1);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        if (sps->temporal_layer[i].num_reorder_pics > sps->temporal_layer[i].max_dec_pic_buffering - 1) {
            av_log(s->avctx, AV_LOG_ERROR, "sps_max_num_reorder_pics out of range: %d\n",
                   sps->temporal_layer[i].num_reorder_pics);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
    }

    if (!sublayer_ordering_info) {
        for (i = 0; i < start; i++) {
            sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[start].max_dec_pic_buffering;
            sps->temporal_layer[i].num_reorder_pics      = sps->temporal_layer[start].num_reorder_pics;
            sps->temporal_layer[i].max_latency_increase  = sps->temporal_layer[start].max_latency_increase;
        }
    }

    sps->log2_min_cb_size                    = get_ue_golomb_long(gb) + 3;
    sps->log2_diff_max_min_coding_block_size = get_ue_golomb_long(gb);
    sps->log2_min_tb_size                    = get_ue_golomb_long(gb) + 2;
    log2_diff_max_min_transform_block_size   = get_ue_golomb_long(gb);
    sps->log2_max_trafo_size                 = log2_diff_max_min_transform_block_size +
                                               sps->log2_min_tb_size;

    if (sps->log2_min_tb_size >= sps->log2_min_cb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid value for log2_min_tb_size");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    sps->max_transform_hierarchy_depth_inter = get_ue_golomb_long(gb);
    sps->max_transform_hierarchy_depth_intra = get_ue_golomb_long(gb);

    sps->scaling_list_enable_flag = get_bits1(gb);
    if (sps->scaling_list_enable_flag) {
        set_default_scaling_list_data(&sps->scaling_list);

        if (get_bits1(gb)) {
            ret = scaling_list_data(s, &sps->scaling_list);
            if (ret < 0)
                goto err;
        }
    }

    sps->amp_enabled_flag = get_bits1(gb);
    sps->sao_enabled      = get_bits1(gb);

    sps->pcm_enabled_flag = get_bits1(gb);
    if (sps->pcm_enabled_flag) {
        int pcm_bit_depth_chroma;
        sps->pcm.bit_depth   = get_bits(gb, 4) + 1;
        pcm_bit_depth_chroma = get_bits(gb, 4) + 1;
        if (pcm_bit_depth_chroma != sps->pcm.bit_depth) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "PCM Luma bit depth (%d) is different from PCM chroma "
                   "bit depth (%d), this is unsupported.\n",
                   sps->pcm.bit_depth, pcm_bit_depth_chroma);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }

        sps->pcm.log2_min_pcm_cb_size = get_ue_golomb_long(gb) + 3;
        sps->pcm.log2_max_pcm_cb_size = sps->pcm.log2_min_pcm_cb_size +
                                        get_ue_golomb_long(gb);
        if (sps->pcm.bit_depth > sps->bit_depth) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "PCM bit depth (%d) is greater than normal bit depth (%d)\n",
                   sps->pcm.bit_depth, sps->bit_depth);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }

        sps->pcm.loop_filter_disable_flag = get_bits1(gb);
    }

    sps->nb_st_rps = get_ue_golomb_long(gb);
    if (sps->nb_st_rps > MAX_SHORT_TERM_RPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "Too many short term RPS: %d.\n",
               sps->nb_st_rps);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    for (i = 0; i < sps->nb_st_rps; i++) {
        if ((ret = ff_hevc_decode_short_term_rps(s, &sps->st_rps[i],
                                                 sps, 0)) < 0)
            goto err;
    }

    sps->long_term_ref_pics_present_flag = get_bits1(gb);
    if (sps->long_term_ref_pics_present_flag) {
        sps->num_long_term_ref_pics_sps = get_ue_golomb_long(gb);
        for (i = 0; i < sps->num_long_term_ref_pics_sps; i++) {
            sps->lt_ref_pic_poc_lsb_sps[i]       = get_bits(gb, sps->log2_max_poc_lsb);
            sps->used_by_curr_pic_lt_sps_flag[i] = get_bits1(gb);
        }
    }

    sps->sps_temporal_mvp_enabled_flag          = get_bits1(gb);
    sps->sps_strong_intra_smoothing_enable_flag = get_bits1(gb);
    sps->vui.sar = (AVRational){0, 1};
    vui_present = get_bits1(gb);
    if (vui_present)
        decode_vui(s, sps);
    skip_bits1(gb); // sps_extension_flag

    if (s->apply_defdispwin) {
        sps->output_window.left_offset   += sps->vui.def_disp_win.left_offset;
        sps->output_window.right_offset  += sps->vui.def_disp_win.right_offset;
        sps->output_window.top_offset    += sps->vui.def_disp_win.top_offset;
        sps->output_window.bottom_offset += sps->vui.def_disp_win.bottom_offset;
    }
    if (sps->output_window.left_offset & (0x1F >> (sps->pixel_shift)) &&
        !(s->avctx->flags & CODEC_FLAG_UNALIGNED)) {
        sps->output_window.left_offset &= ~(0x1F >> (sps->pixel_shift));
        av_log(s->avctx, AV_LOG_WARNING, "Reducing left output window to %d "
               "chroma samples to preserve alignment.\n",
               sps->output_window.left_offset);
    }
    sps->output_width  = sps->width -
                         (sps->output_window.left_offset + sps->output_window.right_offset);
    sps->output_height = sps->height -
                         (sps->output_window.top_offset + sps->output_window.bottom_offset);
    if (sps->output_width <= 0 || sps->output_height <= 0) {
        av_log(s->avctx, AV_LOG_WARNING, "Invalid visible frame dimensions: %dx%d.\n",
               sps->output_width, sps->output_height);
        if (s->avctx->err_recognition & AV_EF_EXPLODE) {
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        av_log(s->avctx, AV_LOG_WARNING,
               "Displaying the whole video surface.\n");
        sps->pic_conf_win.left_offset   =
        sps->pic_conf_win.right_offset  =
        sps->pic_conf_win.top_offset    =
        sps->pic_conf_win.bottom_offset = 0;
        sps->output_width  = sps->width;
        sps->output_height = sps->height;
    }

    // Inferred parameters
    sps->log2_ctb_size    = sps->log2_min_cb_size +
                            sps->log2_diff_max_min_coding_block_size;
    sps->log2_min_pu_size = sps->log2_min_cb_size - 1;

    sps->ctb_width  = (sps->width  + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
    sps->ctb_height = (sps->height + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
    sps->ctb_size   = sps->ctb_width * sps->ctb_height;

    sps->min_cb_width  = sps->width  >> sps->log2_min_cb_size;
    sps->min_cb_height = sps->height >> sps->log2_min_cb_size;
    sps->min_tb_width  = sps->width  >> sps->log2_min_tb_size;
    sps->min_tb_height = sps->height >> sps->log2_min_tb_size;
    sps->min_pu_width  = sps->width  >> sps->log2_min_pu_size;
    sps->min_pu_height = sps->height >> sps->log2_min_pu_size;

    sps->qp_bd_offset = 6 * (sps->bit_depth - 8);

    if (sps->width  & ((1 << sps->log2_min_cb_size) - 1) ||
        sps->height & ((1 << sps->log2_min_cb_size) - 1)) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid coded frame dimensions.\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (sps->log2_ctb_size > MAX_LOG2_CTB_SIZE) {
        av_log(s->avctx, AV_LOG_ERROR, "CTB size out of range: 2^%d\n", sps->log2_ctb_size);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (sps->max_transform_hierarchy_depth_inter > sps->log2_ctb_size - sps->log2_min_tb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_inter out of range: %d\n",
               sps->max_transform_hierarchy_depth_inter);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if (sps->max_transform_hierarchy_depth_intra > sps->log2_ctb_size - sps->log2_min_tb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_intra out of range: %d\n",
               sps->max_transform_hierarchy_depth_intra);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if (sps->log2_max_trafo_size > FFMIN(sps->log2_ctb_size, 5)) {
        av_log(s->avctx, AV_LOG_ERROR,
               "max transform block size out of range: %d\n",
               sps->log2_max_trafo_size);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (s->avctx->debug & FF_DEBUG_BITSTREAM) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "Parsed SPS: id %d; coded wxh: %dx%d; "
               "cropped wxh: %dx%d; pix_fmt: %s.\n",
               sps_id, sps->width, sps->height,
               sps->output_width, sps->output_height,
               av_get_pix_fmt_name(sps->pix_fmt));
    }

    /* check if this is a repeat of an already parsed SPS, then keep the
     * original one.
     * otherwise drop all PPSes that depend on it */
    if (s->sps_list[sps_id] &&
        !memcmp(s->sps_list[sps_id]->data, sps_buf->data, sps_buf->size)) {
        av_buffer_unref(&sps_buf);
    } else {
        for (i = 0; i < FF_ARRAY_ELEMS(s->pps_list); i++) {
            if (s->pps_list[i] && ((HEVCPPS*)s->pps_list[i]->data)->sps_id == sps_id)
                av_buffer_unref(&s->pps_list[i]);
        }
        av_buffer_unref(&s->sps_list[sps_id]);
        s->sps_list[sps_id] = sps_buf;
    }

    return 0;

err:
    av_buffer_unref(&sps_buf);
    return ret;
}

static void hevc_pps_free(void *opaque, uint8_t *data)
{
    HEVCPPS *pps = (HEVCPPS*)data;

    av_freep(&pps->column_width);
    av_freep(&pps->row_height);
    av_freep(&pps->col_bd);
    av_freep(&pps->row_bd);
    av_freep(&pps->col_idxX);
    av_freep(&pps->ctb_addr_rs_to_ts);
    av_freep(&pps->ctb_addr_ts_to_rs);
    av_freep(&pps->tile_pos_rs);
    av_freep(&pps->tile_id);
    av_freep(&pps->min_cb_addr_zs);
    av_freep(&pps->min_tb_addr_zs);

    av_freep(&pps);
}
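
/* Parse a picture parameter set NAL unit, derive the tile layout and the
 * raster-scan/tile-scan/z-order address conversion tables (spec section 6.5)
 * and store it in s->pps_list. */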
int ff_hevc_decode_nal_pps(HEVCContext *s)
{
    GetBitContext *gb = &s->HEVClc.gb;
    HEVCSPS *sps      = NULL;
    int pic_area_in_ctbs, pic_area_in_min_cbs, pic_area_in_min_tbs;
    int log2_diff_ctb_min_tb_size;
    int i, j, x, y, ctb_addr_rs, tile_id;
    int ret    = 0;
    int pps_id = 0;

    AVBufferRef *pps_buf;
    HEVCPPS *pps = av_mallocz(sizeof(*pps));

    if (!pps)
        return AVERROR(ENOMEM);

    pps_buf = av_buffer_create((uint8_t *)pps, sizeof(*pps),
                               hevc_pps_free, NULL, 0);
    if (!pps_buf) {
        av_freep(&pps);
        return AVERROR(ENOMEM);
    }

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding PPS\n");

    // Default values
    pps->loop_filter_across_tiles_enabled_flag = 1;
    pps->num_tile_columns                      = 1;
    pps->num_tile_rows                         = 1;
    pps->uniform_spacing_flag                  = 1;
    pps->disable_dbf                           = 0;
    pps->beta_offset                           = 0;
    pps->tc_offset                             = 0;

    // Coded parameters
    pps_id = get_ue_golomb_long(gb);
    if (pps_id >= MAX_PPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "PPS id out of range: %d\n", pps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    pps->sps_id = get_ue_golomb_long(gb);
    if (pps->sps_id >= MAX_SPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", pps->sps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if (!s->sps_list[pps->sps_id]) {
        av_log(s->avctx, AV_LOG_ERROR, "SPS does not exist \n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    sps = (HEVCSPS *)s->sps_list[pps->sps_id]->data;

    pps->dependent_slice_segments_enabled_flag = get_bits1(gb);
    pps->output_flag_present_flag              = get_bits1(gb);
    pps->num_extra_slice_header_bits           = get_bits(gb, 3);

    pps->sign_data_hiding_flag = get_bits1(gb);

    pps->cabac_init_present_flag = get_bits1(gb);

    pps->num_ref_idx_l0_default_active = get_ue_golomb_long(gb) + 1;
    pps->num_ref_idx_l1_default_active = get_ue_golomb_long(gb) + 1;

    pps->pic_init_qp_minus26 = get_se_golomb(gb);

    pps->constrained_intra_pred_flag = get_bits1(gb);
    pps->transform_skip_enabled_flag = get_bits1(gb);

    pps->cu_qp_delta_enabled_flag = get_bits1(gb);
    pps->diff_cu_qp_delta_depth   = 0;
    if (pps->cu_qp_delta_enabled_flag)
        pps->diff_cu_qp_delta_depth = get_ue_golomb_long(gb);

    pps->cb_qp_offset = get_se_golomb(gb);
    if (pps->cb_qp_offset < -12 || pps->cb_qp_offset > 12) {
        av_log(s->avctx, AV_LOG_ERROR, "pps_cb_qp_offset out of range: %d\n",
               pps->cb_qp_offset);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    pps->cr_qp_offset = get_se_golomb(gb);
    if (pps->cr_qp_offset < -12 || pps->cr_qp_offset > 12) {
        av_log(s->avctx, AV_LOG_ERROR, "pps_cr_qp_offset out of range: %d\n",
               pps->cr_qp_offset);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    pps->pic_slice_level_chroma_qp_offsets_present_flag = get_bits1(gb);

    pps->weighted_pred_flag   = get_bits1(gb);
    pps->weighted_bipred_flag = get_bits1(gb);

    pps->transquant_bypass_enable_flag    = get_bits1(gb);
    pps->tiles_enabled_flag               = get_bits1(gb);
    pps->entropy_coding_sync_enabled_flag = get_bits1(gb);

    if (pps->tiles_enabled_flag) {
        pps->num_tile_columns = get_ue_golomb_long(gb) + 1;
        pps->num_tile_rows    = get_ue_golomb_long(gb) + 1;
        if (pps->num_tile_columns == 0 ||
            pps->num_tile_columns >= sps->width) {
            av_log(s->avctx, AV_LOG_ERROR, "num_tile_columns_minus1 out of range: %d\n",
                   pps->num_tile_columns - 1);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        if (pps->num_tile_rows == 0 ||
            pps->num_tile_rows >= sps->height) {
            av_log(s->avctx, AV_LOG_ERROR, "num_tile_rows_minus1 out of range: %d\n",
                   pps->num_tile_rows - 1);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }

        pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
        pps->row_height   = av_malloc_array(pps->num_tile_rows,    sizeof(*pps->row_height));
        if (!pps->column_width || !pps->row_height) {
            ret = AVERROR(ENOMEM);
            goto err;
        }

        pps->uniform_spacing_flag = get_bits1(gb);
        if (!pps->uniform_spacing_flag) {
            int sum = 0;
            for (i = 0; i < pps->num_tile_columns - 1; i++) {
                pps->column_width[i] = get_ue_golomb_long(gb) + 1;
                sum                 += pps->column_width[i];
            }
            if (sum >= sps->ctb_width) {
                av_log(s->avctx, AV_LOG_ERROR, "Invalid tile widths.\n");
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            pps->column_width[pps->num_tile_columns - 1] = sps->ctb_width - sum;

            sum = 0;
            for (i = 0; i < pps->num_tile_rows - 1; i++) {
                pps->row_height[i] = get_ue_golomb_long(gb) + 1;
                sum               += pps->row_height[i];
            }
            if (sum >= sps->ctb_height) {
                av_log(s->avctx, AV_LOG_ERROR, "Invalid tile heights.\n");
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            pps->row_height[pps->num_tile_rows - 1] = sps->ctb_height - sum;
        }
        pps->loop_filter_across_tiles_enabled_flag = get_bits1(gb);
    }

    pps->seq_loop_filter_across_slices_enabled_flag = get_bits1(gb);

    pps->deblocking_filter_control_present_flag = get_bits1(gb);
    if (pps->deblocking_filter_control_present_flag) {
        pps->deblocking_filter_override_enabled_flag = get_bits1(gb);
        pps->disable_dbf                             = get_bits1(gb);
        if (!pps->disable_dbf) {
            pps->beta_offset = get_se_golomb(gb) * 2;
            pps->tc_offset   = get_se_golomb(gb) * 2;
            if (pps->beta_offset / 2 < -6 || pps->beta_offset / 2 > 6) {
                av_log(s->avctx, AV_LOG_ERROR, "pps_beta_offset_div2 out of range: %d\n",
                       pps->beta_offset / 2);
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            if (pps->tc_offset / 2 < -6 || pps->tc_offset / 2 > 6) {
                av_log(s->avctx, AV_LOG_ERROR, "pps_tc_offset_div2 out of range: %d\n",
                       pps->tc_offset / 2);
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
        }
    }

    pps->scaling_list_data_present_flag = get_bits1(gb);
    if (pps->scaling_list_data_present_flag) {
        set_default_scaling_list_data(&pps->scaling_list);
        ret = scaling_list_data(s, &pps->scaling_list);
        if (ret < 0)
            goto err;
    }
    pps->lists_modification_present_flag = get_bits1(gb);
    pps->log2_parallel_merge_level       = get_ue_golomb_long(gb) + 2;
    if (pps->log2_parallel_merge_level > sps->log2_ctb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "log2_parallel_merge_level_minus2 out of range: %d\n",
               pps->log2_parallel_merge_level - 2);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    pps->slice_header_extension_present_flag = get_bits1(gb);
    skip_bits1(gb); // pps_extension_flag

    // Inferred parameters
    pps->col_bd   = av_malloc_array(pps->num_tile_columns + 1, sizeof(*pps->col_bd));
    pps->row_bd   = av_malloc_array(pps->num_tile_rows + 1,    sizeof(*pps->row_bd));
    pps->col_idxX = av_malloc_array(sps->ctb_width,            sizeof(*pps->col_idxX));
    if (!pps->col_bd || !pps->row_bd || !pps->col_idxX) {
        ret = AVERROR(ENOMEM);
        goto err;
    }

    if (pps->uniform_spacing_flag) {
        if (!pps->column_width) {
            pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
            pps->row_height   = av_malloc_array(pps->num_tile_rows,    sizeof(*pps->row_height));
        }
        if (!pps->column_width || !pps->row_height) {
            ret = AVERROR(ENOMEM);
            goto err;
        }

        for (i = 0; i < pps->num_tile_columns; i++) {
            pps->column_width[i] = ((i + 1) * sps->ctb_width) / pps->num_tile_columns -
                                   (i * sps->ctb_width) / pps->num_tile_columns;
        }

        for (i = 0; i < pps->num_tile_rows; i++) {
            pps->row_height[i] = ((i + 1) * sps->ctb_height) / pps->num_tile_rows -
                                 (i * sps->ctb_height) / pps->num_tile_rows;
        }
    }

    pps->col_bd[0] = 0;
    for (i = 0; i < pps->num_tile_columns; i++)
        pps->col_bd[i + 1] = pps->col_bd[i] + pps->column_width[i];

    pps->row_bd[0] = 0;
    for (i = 0; i < pps->num_tile_rows; i++)
        pps->row_bd[i + 1] = pps->row_bd[i] + pps->row_height[i];

    for (i = 0, j = 0; i < sps->ctb_width; i++) {
        if (i > pps->col_bd[j])
            j++;
        pps->col_idxX[i] = j;
    }

    /**
     * 6.5
     */
    pic_area_in_ctbs    = sps->ctb_width    * sps->ctb_height;
    pic_area_in_min_cbs = sps->min_cb_width * sps->min_cb_height;
    pic_area_in_min_tbs = sps->min_tb_width * sps->min_tb_height;

    pps->ctb_addr_rs_to_ts = av_malloc_array(pic_area_in_ctbs,    sizeof(*pps->ctb_addr_rs_to_ts));
    pps->ctb_addr_ts_to_rs = av_malloc_array(pic_area_in_ctbs,    sizeof(*pps->ctb_addr_ts_to_rs));
    pps->tile_id           = av_malloc_array(pic_area_in_ctbs,    sizeof(*pps->tile_id));
    pps->min_cb_addr_zs    = av_malloc_array(pic_area_in_min_cbs, sizeof(*pps->min_cb_addr_zs));
    pps->min_tb_addr_zs    = av_malloc_array(pic_area_in_min_tbs, sizeof(*pps->min_tb_addr_zs));
    if (!pps->ctb_addr_rs_to_ts || !pps->ctb_addr_ts_to_rs ||
        !pps->tile_id || !pps->min_cb_addr_zs || !pps->min_tb_addr_zs) {
        ret = AVERROR(ENOMEM);
        goto err;
    }

    for (ctb_addr_rs = 0; ctb_addr_rs < pic_area_in_ctbs; ctb_addr_rs++) {
        int tb_x   = ctb_addr_rs % sps->ctb_width;
        int tb_y   = ctb_addr_rs / sps->ctb_width;
        int tile_x = 0;
        int tile_y = 0;
        int val    = 0;

        for (i = 0; i < pps->num_tile_columns; i++) {
            if (tb_x < pps->col_bd[i + 1]) {
                tile_x = i;
                break;
            }
        }

        for (i = 0; i < pps->num_tile_rows; i++) {
            if (tb_y < pps->row_bd[i + 1]) {
                tile_y = i;
                break;
            }
        }

        for (i = 0; i < tile_x; i++)
            val += pps->row_height[tile_y] * pps->column_width[i];
        for (i = 0; i < tile_y; i++)
            val += sps->ctb_width * pps->row_height[i];

        val += (tb_y - pps->row_bd[tile_y]) * pps->column_width[tile_x] +
               tb_x - pps->col_bd[tile_x];

        pps->ctb_addr_rs_to_ts[ctb_addr_rs] = val;
        pps->ctb_addr_ts_to_rs[val]         = ctb_addr_rs;
    }

    for (j = 0, tile_id = 0; j < pps->num_tile_rows; j++)
        for (i = 0; i < pps->num_tile_columns; i++, tile_id++)
            for (y = pps->row_bd[j]; y < pps->row_bd[j + 1]; y++)
                for (x = pps->col_bd[i]; x < pps->col_bd[i + 1]; x++)
                    pps->tile_id[pps->ctb_addr_rs_to_ts[y * sps->ctb_width + x]] = tile_id;

    pps->tile_pos_rs = av_malloc_array(tile_id, sizeof(*pps->tile_pos_rs));
    if (!pps->tile_pos_rs) {
        ret = AVERROR(ENOMEM);
        goto err;
    }

    for (j = 0; j < pps->num_tile_rows; j++)
        for (i = 0; i < pps->num_tile_columns; i++)
            pps->tile_pos_rs[j * pps->num_tile_columns + i] = pps->row_bd[j] * sps->ctb_width + pps->col_bd[i];

    for (y = 0; y < sps->min_cb_height; y++) {
        for (x = 0; x < sps->min_cb_width; x++) {
            int tb_x        = x >> sps->log2_diff_max_min_coding_block_size;
            int tb_y        = y >> sps->log2_diff_max_min_coding_block_size;
            int ctb_addr_rs = sps->ctb_width * tb_y + tb_x;
            int val         = pps->ctb_addr_rs_to_ts[ctb_addr_rs] <<
                              (sps->log2_diff_max_min_coding_block_size * 2);
            for (i = 0; i < sps->log2_diff_max_min_coding_block_size; i++) {
                int m = 1 << i;
                val += (m & x ? m * m : 0) + (m & y ? 2 * m * m : 0);
            }
            pps->min_cb_addr_zs[y * sps->min_cb_width + x] = val;
        }
    }

    log2_diff_ctb_min_tb_size = sps->log2_ctb_size - sps->log2_min_tb_size;
    for (y = 0; y < sps->min_tb_height; y++) {
        for (x = 0; x < sps->min_tb_width; x++) {
            int tb_x        = x >> log2_diff_ctb_min_tb_size;
            int tb_y        = y >> log2_diff_ctb_min_tb_size;
            int ctb_addr_rs = sps->ctb_width * tb_y + tb_x;
            int val         = pps->ctb_addr_rs_to_ts[ctb_addr_rs] <<
                              (log2_diff_ctb_min_tb_size * 2);
            for (i = 0; i < log2_diff_ctb_min_tb_size; i++) {
                int m = 1 << i;
                val += (m & x ? m * m : 0) + (m & y ? 2 * m * m : 0);
            }
            pps->min_tb_addr_zs[y * sps->min_tb_width + x] = val;
        }
    }

    av_buffer_unref(&s->pps_list[pps_id]);
    s->pps_list[pps_id] = pps_buf;
    return 0;

err:
    av_buffer_unref(&pps_buf);
    return ret;
}