/*
 * HEVC Parameter Set decoding
 *
 * Copyright (C) 2012 - 2013 Guillaume Martres
 * Copyright (C) 2012 - 2013 Mickael Raulet
 * Copyright (C) 2012 - 2013 Gildas Cocherel
 * Copyright (C) 2013 Vittorio Giovara
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "libavutil/imgutils.h"

#include "golomb.h"
#include "hevc.h"
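
/* Default 8x8 scaling list values for intra and inter blocks;
 * set_default_scaling_list_data() below also reuses these 64 coefficients
 * for the 16x16 and 32x32 list sizes. */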
static const uint8_t default_scaling_list_intra[] = {
    16, 16, 16, 16, 17, 18, 21, 24,
    16, 16, 16, 16, 17, 19, 22, 25,
    16, 16, 17, 18, 20, 22, 25, 29,
    16, 16, 18, 21, 24, 27, 31, 36,
    17, 17, 20, 24, 30, 35, 41, 47,
    18, 19, 22, 27, 35, 44, 54, 65,
    21, 22, 25, 31, 41, 54, 70, 88,
    24, 25, 29, 36, 47, 65, 88, 115
};

static const uint8_t default_scaling_list_inter[] = {
    16, 16, 16, 16, 17, 18, 20, 24,
    16, 16, 16, 17, 18, 20, 24, 25,
    16, 16, 17, 18, 20, 24, 25, 28,
    16, 17, 18, 20, 24, 25, 28, 33,
    17, 18, 20, 24, 25, 28, 33, 41,
    18, 20, 24, 25, 28, 33, 41, 54,
    20, 24, 25, 28, 33, 41, 54, 71,
    24, 25, 28, 33, 41, 54, 71, 91
};
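
/* Sample aspect ratios indexed by aspect_ratio_idc (H.265 Table E-1);
 * index 0 means unspecified, and idc 255 (EXTENDED_SAR) is parsed
 * explicitly in decode_vui(). */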
static const AVRational vui_sar[] = {
    {  0,   1 },
    {  1,   1 },
    { 12,  11 },
    { 10,  11 },
    { 16,  11 },
    { 40,  33 },
    { 24,  11 },
    { 20,  11 },
    { 32,  11 },
    { 80,  33 },
    { 18,  11 },
    { 15,  11 },
    { 64,  33 },
    { 160, 99 },
    {  4,   3 },
    {  3,   2 },
    {  2,   1 },
};

int ff_hevc_decode_short_term_rps(HEVCContext *s, ShortTermRPS *rps,
                                  const HEVCSPS *sps, int is_slice_header)
{
    HEVCLocalContext *lc = &s->HEVClc;
    uint8_t rps_predict = 0;
    int delta_poc;
    int k0 = 0;
    int k1 = 0;
    int k  = 0;
    int i;

    GetBitContext *gb = &lc->gb;

    if (rps != sps->st_rps && sps->nb_st_rps)
        rps_predict = get_bits1(gb);

    if (rps_predict) {
        const ShortTermRPS *rps_ridx;
        int delta_rps, abs_delta_rps;
        uint8_t use_delta_flag = 0;
        uint8_t delta_rps_sign;

        if (is_slice_header) {
            unsigned int delta_idx = get_ue_golomb_long(gb) + 1;
            if (delta_idx > sps->nb_st_rps) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "Invalid value of delta_idx in slice header RPS: %d > %d.\n",
                       delta_idx, sps->nb_st_rps);
                return AVERROR_INVALIDDATA;
            }
            rps_ridx = &sps->st_rps[sps->nb_st_rps - delta_idx];
        } else
            rps_ridx = &sps->st_rps[rps - sps->st_rps - 1];

        delta_rps_sign = get_bits1(gb);
        abs_delta_rps  = get_ue_golomb_long(gb) + 1;
        delta_rps      = (1 - (delta_rps_sign << 1)) * abs_delta_rps;
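
        /* Derive this RPS from the reference RPS: each of its entries, plus
         * one extra candidate for delta_rps itself, may be carried over with
         * its POC delta shifted by delta_rps; negative and positive deltas
         * are counted separately in k0 and k1. */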
        for (i = 0; i <= rps_ridx->num_delta_pocs; i++) {
            int used = rps->used[k] = get_bits1(gb);

            if (!used)
                use_delta_flag = get_bits1(gb);

            if (used || use_delta_flag) {
                if (i < rps_ridx->num_delta_pocs)
                    delta_poc = delta_rps + rps_ridx->delta_poc[i];
                else
                    delta_poc = delta_rps;
                rps->delta_poc[k] = delta_poc;
                if (delta_poc < 0)
                    k0++;
                else
                    k1++;
                k++;
            }
        }

        rps->num_delta_pocs    = k;
        rps->num_negative_pics = k0;
        // sort in increasing order (smallest first)
        if (rps->num_delta_pocs != 0) {
            int used, tmp;
            for (i = 1; i < rps->num_delta_pocs; i++) {
                delta_poc = rps->delta_poc[i];
                used      = rps->used[i];
                for (k = i - 1; k >= 0; k--) {
                    tmp = rps->delta_poc[k];
                    if (delta_poc < tmp) {
                        rps->delta_poc[k + 1] = tmp;
                        rps->used[k + 1]      = rps->used[k];
                        rps->delta_poc[k]     = delta_poc;
                        rps->used[k]          = used;
                    }
                }
            }
        }
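
        /* The insertion sort above leaves all deltas in increasing order;
         * the negative half must instead run from the closest picture to the
         * farthest one, so reverse it in place. */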
        if ((rps->num_negative_pics >> 1) != 0) {
            int used;
            k = rps->num_negative_pics - 1;
            // flip the negative values to largest first
            for (i = 0; i < rps->num_negative_pics >> 1; i++) {
                delta_poc         = rps->delta_poc[i];
                used              = rps->used[i];
                rps->delta_poc[i] = rps->delta_poc[k];
                rps->used[i]      = rps->used[k];
                rps->delta_poc[k] = delta_poc;
                rps->used[k]      = used;
                k--;
            }
        }
    } else {
        unsigned int prev, nb_positive_pics;
        rps->num_negative_pics = get_ue_golomb_long(gb);
        nb_positive_pics       = get_ue_golomb_long(gb);

        if (rps->num_negative_pics >= MAX_REFS ||
            nb_positive_pics >= MAX_REFS) {
            av_log(s->avctx, AV_LOG_ERROR, "Too many refs in a short term RPS.\n");
            return AVERROR_INVALIDDATA;
        }

        rps->num_delta_pocs = rps->num_negative_pics + nb_positive_pics;
        if (rps->num_delta_pocs) {
            prev = 0;
            for (i = 0; i < rps->num_negative_pics; i++) {
                delta_poc = get_ue_golomb_long(gb) + 1;
                prev -= delta_poc;
                rps->delta_poc[i] = prev;
                rps->used[i]      = get_bits1(gb);
            }
            prev = 0;
            for (i = 0; i < nb_positive_pics; i++) {
                delta_poc = get_ue_golomb_long(gb) + 1;
                prev += delta_poc;
                rps->delta_poc[rps->num_negative_pics + i] = prev;
                rps->used[rps->num_negative_pics + i]      = get_bits1(gb);
            }
        }
    }
    return 0;
}

static void decode_profile_tier_level(HEVCContext *s, PTLCommon *ptl)
{
    int i;
    GetBitContext *gb = &s->HEVClc.gb;

    ptl->profile_space = get_bits(gb, 2);
    ptl->tier_flag     = get_bits1(gb);
    ptl->profile_idc   = get_bits(gb, 5);
    if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN)
        av_log(s->avctx, AV_LOG_DEBUG, "Main profile bitstream\n");
    else if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN_10)
        av_log(s->avctx, AV_LOG_DEBUG, "Main 10 profile bitstream\n");
    else if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN_STILL_PICTURE)
        av_log(s->avctx, AV_LOG_DEBUG, "Main Still Picture profile bitstream\n");
    else
        av_log(s->avctx, AV_LOG_WARNING, "Unknown HEVC profile: %d\n", ptl->profile_idc);

    for (i = 0; i < 32; i++)
        ptl->profile_compatibility_flag[i] = get_bits1(gb);
    ptl->progressive_source_flag    = get_bits1(gb);
    ptl->interlaced_source_flag     = get_bits1(gb);
    ptl->non_packed_constraint_flag = get_bits1(gb);
    ptl->frame_only_constraint_flag = get_bits1(gb);

    skip_bits(gb, 16); // XXX_reserved_zero_44bits[0..15]
    skip_bits(gb, 16); // XXX_reserved_zero_44bits[16..31]
    skip_bits(gb, 12); // XXX_reserved_zero_44bits[32..43]
}

static void parse_ptl(HEVCContext *s, PTL *ptl, int max_num_sub_layers)
{
    int i;
    GetBitContext *gb = &s->HEVClc.gb;

    decode_profile_tier_level(s, &ptl->general_ptl);
    ptl->general_ptl.level_idc = get_bits(gb, 8);

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        ptl->sub_layer_profile_present_flag[i] = get_bits1(gb);
        ptl->sub_layer_level_present_flag[i]   = get_bits1(gb);
    }
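
    /* When any sub-layer flags were coded, the flag pairs are padded with
     * reserved_zero_2bits up to eight entries so that the following
     * profile/level data stays byte-aligned. */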
    if (max_num_sub_layers - 1 > 0)
        for (i = max_num_sub_layers - 1; i < 8; i++)
            skip_bits(gb, 2); // reserved_zero_2bits[i]

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        if (ptl->sub_layer_profile_present_flag[i])
            decode_profile_tier_level(s, &ptl->sub_layer_ptl[i]);
        if (ptl->sub_layer_level_present_flag[i])
            ptl->sub_layer_ptl[i].level_idc = get_bits(gb, 8);
    }
}

static void decode_sublayer_hrd(HEVCContext *s, unsigned int nb_cpb,
                                int subpic_params_present)
{
    GetBitContext *gb = &s->HEVClc.gb;
    int i;

    for (i = 0; i < nb_cpb; i++) {
        get_ue_golomb_long(gb); // bit_rate_value_minus1
        get_ue_golomb_long(gb); // cpb_size_value_minus1

        if (subpic_params_present) {
            get_ue_golomb_long(gb); // cpb_size_du_value_minus1
            get_ue_golomb_long(gb); // bit_rate_du_value_minus1
        }
        skip_bits1(gb); // cbr_flag
    }
}

static void decode_hrd(HEVCContext *s, int common_inf_present,
                       int max_sublayers)
{
    GetBitContext *gb = &s->HEVClc.gb;
    int nal_params_present = 0, vcl_params_present = 0;
    int subpic_params_present = 0;
    int i;

    if (common_inf_present) {
        nal_params_present = get_bits1(gb);
        vcl_params_present = get_bits1(gb);

        if (nal_params_present || vcl_params_present) {
            subpic_params_present = get_bits1(gb);

            if (subpic_params_present) {
                skip_bits(gb, 8); // tick_divisor_minus2
                skip_bits(gb, 5); // du_cpb_removal_delay_increment_length_minus1
                skip_bits(gb, 1); // sub_pic_cpb_params_in_pic_timing_sei_flag
                skip_bits(gb, 5); // dpb_output_delay_du_length_minus1
            }

            skip_bits(gb, 4); // bit_rate_scale
            skip_bits(gb, 4); // cpb_size_scale

            if (subpic_params_present)
                skip_bits(gb, 4); // cpb_size_du_scale

            skip_bits(gb, 5); // initial_cpb_removal_delay_length_minus1
            skip_bits(gb, 5); // au_cpb_removal_delay_length_minus1
            skip_bits(gb, 5); // dpb_output_delay_length_minus1
        }
    }

    for (i = 0; i < max_sublayers; i++) {
        int low_delay = 0;
        unsigned int nb_cpb = 1;
        int fixed_rate = get_bits1(gb);
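
        /* The bit just read is fixed_pic_rate_general_flag; when it is zero,
         * fixed_pic_rate_within_cvs_flag is coded as well, so fixed_rate ends
         * up set when either of the two flags is set. */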
        if (!fixed_rate)
            fixed_rate = get_bits1(gb);

        if (fixed_rate)
            get_ue_golomb_long(gb); // elemental_duration_in_tc_minus1
        else
            low_delay = get_bits1(gb);

        if (!low_delay)
            nb_cpb = get_ue_golomb_long(gb) + 1;

        if (nal_params_present)
            decode_sublayer_hrd(s, nb_cpb, subpic_params_present);
        if (vcl_params_present)
            decode_sublayer_hrd(s, nb_cpb, subpic_params_present);
    }
}

int ff_hevc_decode_nal_vps(HEVCContext *s)
{
    int i, j;
    GetBitContext *gb = &s->HEVClc.gb;
    int vps_id = 0;
    HEVCVPS *vps;
    AVBufferRef *vps_buf = av_buffer_allocz(sizeof(*vps));

    if (!vps_buf)
        return AVERROR(ENOMEM);
    vps = (HEVCVPS*)vps_buf->data;

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding VPS\n");

    vps_id = get_bits(gb, 4);
    if (vps_id >= MAX_VPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", vps_id);
        goto err;
    }

    if (get_bits(gb, 2) != 3) { // vps_reserved_three_2bits
        av_log(s->avctx, AV_LOG_ERROR, "vps_reserved_three_2bits is not three\n");
        goto err;
    }

    vps->vps_max_layers               = get_bits(gb, 6) + 1;
    vps->vps_max_sub_layers           = get_bits(gb, 3) + 1;
    vps->vps_temporal_id_nesting_flag = get_bits1(gb);

    if (get_bits(gb, 16) != 0xffff) { // vps_reserved_ffff_16bits
        av_log(s->avctx, AV_LOG_ERROR, "vps_reserved_ffff_16bits is not 0xffff\n");
        goto err;
    }

    if (vps->vps_max_sub_layers > MAX_SUB_LAYERS) {
        av_log(s->avctx, AV_LOG_ERROR, "vps_max_sub_layers out of range: %d\n",
               vps->vps_max_sub_layers);
        goto err;
    }

    parse_ptl(s, &vps->ptl, vps->vps_max_sub_layers);

    vps->vps_sub_layer_ordering_info_present_flag = get_bits1(gb);
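
    /* If the ordering info is not signalled per sub-layer, only the values
     * for the highest sub-layer are coded and the loop below starts there. */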
    i = vps->vps_sub_layer_ordering_info_present_flag ? 0 : vps->vps_max_sub_layers - 1;
    for (; i < vps->vps_max_sub_layers; i++) {
        vps->vps_max_dec_pic_buffering[i] = get_ue_golomb_long(gb) + 1;
        vps->vps_num_reorder_pics[i]      = get_ue_golomb_long(gb);
        vps->vps_max_latency_increase[i]  = get_ue_golomb_long(gb) - 1;

        if (vps->vps_max_dec_pic_buffering[i] > MAX_DPB_SIZE) {
            av_log(s->avctx, AV_LOG_ERROR, "vps_max_dec_pic_buffering_minus1 out of range: %d\n",
                   vps->vps_max_dec_pic_buffering[i] - 1);
            goto err;
        }
        if (vps->vps_num_reorder_pics[i] > vps->vps_max_dec_pic_buffering[i] - 1) {
            av_log(s->avctx, AV_LOG_ERROR, "vps_max_num_reorder_pics out of range: %d\n",
                   vps->vps_num_reorder_pics[i]);
            goto err;
        }
    }

    vps->vps_max_layer_id   = get_bits(gb, 6);
    vps->vps_num_layer_sets = get_ue_golomb_long(gb) + 1;
    for (i = 1; i < vps->vps_num_layer_sets; i++)
        for (j = 0; j <= vps->vps_max_layer_id; j++)
            skip_bits(gb, 1); // layer_id_included_flag[i][j]

    vps->vps_timing_info_present_flag = get_bits1(gb);
    if (vps->vps_timing_info_present_flag) {
        vps->vps_num_units_in_tick               = get_bits_long(gb, 32);
        vps->vps_time_scale                      = get_bits_long(gb, 32);
        vps->vps_poc_proportional_to_timing_flag = get_bits1(gb);
        if (vps->vps_poc_proportional_to_timing_flag)
            vps->vps_num_ticks_poc_diff_one = get_ue_golomb_long(gb) + 1;
        vps->vps_num_hrd_parameters = get_ue_golomb_long(gb);
        for (i = 0; i < vps->vps_num_hrd_parameters; i++) {
            int common_inf_present = 1;

            get_ue_golomb_long(gb); // hrd_layer_set_idx
            if (i)
                common_inf_present = get_bits1(gb);
            decode_hrd(s, common_inf_present, vps->vps_max_sub_layers);
        }
    }
    get_bits1(gb); /* vps_extension_flag */

    av_buffer_unref(&s->vps_list[vps_id]);
    s->vps_list[vps_id] = vps_buf;

    return 0;

err:
    av_buffer_unref(&vps_buf);
    return AVERROR_INVALIDDATA;
}

static void decode_vui(HEVCContext *s, HEVCSPS *sps)
{
    VUI *vui          = &sps->vui;
    GetBitContext *gb = &s->HEVClc.gb;
    int sar_present;

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding VUI\n");

    sar_present = get_bits1(gb);
    if (sar_present) {
        uint8_t sar_idx = get_bits(gb, 8);
        if (sar_idx < FF_ARRAY_ELEMS(vui_sar))
            vui->sar = vui_sar[sar_idx];
        else if (sar_idx == 255) {
            vui->sar.num = get_bits(gb, 16);
            vui->sar.den = get_bits(gb, 16);
        } else
            av_log(s->avctx, AV_LOG_WARNING,
                   "Unknown SAR index: %u.\n", sar_idx);
    }

    vui->overscan_info_present_flag = get_bits1(gb);
    if (vui->overscan_info_present_flag)
        vui->overscan_appropriate_flag = get_bits1(gb);

    vui->video_signal_type_present_flag = get_bits1(gb);
    if (vui->video_signal_type_present_flag) {
        vui->video_format                    = get_bits(gb, 3);
        vui->video_full_range_flag           = get_bits1(gb);
        vui->colour_description_present_flag = get_bits1(gb);
        if (vui->video_full_range_flag && sps->pix_fmt == AV_PIX_FMT_YUV420P)
            sps->pix_fmt = AV_PIX_FMT_YUVJ420P;
        if (vui->colour_description_present_flag) {
            vui->colour_primaries        = get_bits(gb, 8);
            vui->transfer_characteristic = get_bits(gb, 8);
            vui->matrix_coeffs           = get_bits(gb, 8);

            // Set invalid values to "unspecified"
            if (vui->colour_primaries >= AVCOL_PRI_NB)
                vui->colour_primaries = AVCOL_PRI_UNSPECIFIED;
            if (vui->transfer_characteristic >= AVCOL_TRC_NB)
                vui->transfer_characteristic = AVCOL_TRC_UNSPECIFIED;
            if (vui->matrix_coeffs >= AVCOL_SPC_NB)
                vui->matrix_coeffs = AVCOL_SPC_UNSPECIFIED;
        }
    }

    vui->chroma_loc_info_present_flag = get_bits1(gb);
    if (vui->chroma_loc_info_present_flag) {
        vui->chroma_sample_loc_type_top_field    = get_ue_golomb_long(gb);
        vui->chroma_sample_loc_type_bottom_field = get_ue_golomb_long(gb);
    }

    vui->neutra_chroma_indication_flag = get_bits1(gb);
    vui->field_seq_flag                = get_bits1(gb);
    vui->frame_field_info_present_flag = get_bits1(gb);

    vui->default_display_window_flag = get_bits1(gb);
    if (vui->default_display_window_flag) {
        //TODO: * 2 is only valid for 420
        vui->def_disp_win.left_offset   = get_ue_golomb_long(gb) * 2;
        vui->def_disp_win.right_offset  = get_ue_golomb_long(gb) * 2;
        vui->def_disp_win.top_offset    = get_ue_golomb_long(gb) * 2;
        vui->def_disp_win.bottom_offset = get_ue_golomb_long(gb) * 2;

        if (s->apply_defdispwin &&
            s->avctx->flags2 & CODEC_FLAG2_IGNORE_CROP) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "discarding vui default display window, "
                   "original values are l:%u r:%u t:%u b:%u\n",
                   vui->def_disp_win.left_offset,
                   vui->def_disp_win.right_offset,
                   vui->def_disp_win.top_offset,
                   vui->def_disp_win.bottom_offset);

            vui->def_disp_win.left_offset   =
            vui->def_disp_win.right_offset  =
            vui->def_disp_win.top_offset    =
            vui->def_disp_win.bottom_offset = 0;
        }
    }

    vui->vui_timing_info_present_flag = get_bits1(gb);
    if (vui->vui_timing_info_present_flag) {
        vui->vui_num_units_in_tick               = get_bits_long(gb, 32);
        vui->vui_time_scale                      = get_bits_long(gb, 32);
        vui->vui_poc_proportional_to_timing_flag = get_bits1(gb);
        if (vui->vui_poc_proportional_to_timing_flag)
            vui->vui_num_ticks_poc_diff_one_minus1 = get_ue_golomb_long(gb);
        vui->vui_hrd_parameters_present_flag = get_bits1(gb);
        if (vui->vui_hrd_parameters_present_flag)
            decode_hrd(s, 1, sps->max_sub_layers);
    }

    vui->bitstream_restriction_flag = get_bits1(gb);
    if (vui->bitstream_restriction_flag) {
        vui->tiles_fixed_structure_flag              = get_bits1(gb);
        vui->motion_vectors_over_pic_boundaries_flag = get_bits1(gb);
        vui->restricted_ref_pic_lists_flag           = get_bits1(gb);
        vui->min_spatial_segmentation_idc            = get_ue_golomb_long(gb);
        vui->max_bytes_per_pic_denom                 = get_ue_golomb_long(gb);
        vui->max_bits_per_min_cu_denom               = get_ue_golomb_long(gb);
        vui->log2_max_mv_length_horizontal           = get_ue_golomb_long(gb);
        vui->log2_max_mv_length_vertical             = get_ue_golomb_long(gb);
    }
}

static void set_default_scaling_list_data(ScalingList *sl)
{
    int matrixId;

    for (matrixId = 0; matrixId < 6; matrixId++) {
        // 4x4 default is 16
        memset(sl->sl[0][matrixId], 16, 16);
        sl->sl_dc[0][matrixId] = 16; // default for 16x16
        sl->sl_dc[1][matrixId] = 16; // default for 32x32
    }
    memcpy(sl->sl[1][0], default_scaling_list_intra, 64);
    memcpy(sl->sl[1][1], default_scaling_list_intra, 64);
    memcpy(sl->sl[1][2], default_scaling_list_intra, 64);
    memcpy(sl->sl[1][3], default_scaling_list_inter, 64);
    memcpy(sl->sl[1][4], default_scaling_list_inter, 64);
    memcpy(sl->sl[1][5], default_scaling_list_inter, 64);
    memcpy(sl->sl[2][0], default_scaling_list_intra, 64);
    memcpy(sl->sl[2][1], default_scaling_list_intra, 64);
    memcpy(sl->sl[2][2], default_scaling_list_intra, 64);
    memcpy(sl->sl[2][3], default_scaling_list_inter, 64);
    memcpy(sl->sl[2][4], default_scaling_list_inter, 64);
    memcpy(sl->sl[2][5], default_scaling_list_inter, 64);
    memcpy(sl->sl[3][0], default_scaling_list_intra, 64);
    memcpy(sl->sl[3][1], default_scaling_list_inter, 64);
}

static int scaling_list_data(HEVCContext *s, ScalingList *sl)
{
    GetBitContext *gb = &s->HEVClc.gb;
    uint8_t scaling_list_pred_mode_flag[4][6];
    int32_t scaling_list_dc_coef[2][6];
    int size_id, matrix_id, i, pos;
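
    /* size_id runs over the four transform sizes (4x4 .. 32x32) and matrix_id
     * over the intra/inter luma and chroma lists; only two lists exist for
     * the 32x32 size. */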
    for (size_id = 0; size_id < 4; size_id++)
        for (matrix_id = 0; matrix_id < (size_id == 3 ? 2 : 6); matrix_id++) {
            scaling_list_pred_mode_flag[size_id][matrix_id] = get_bits1(gb);
            if (!scaling_list_pred_mode_flag[size_id][matrix_id]) {
                unsigned int delta = get_ue_golomb_long(gb);
                /* Only need to handle non-zero delta. Zero means default,
                 * which should already be in the arrays. */
                if (delta) {
                    // Copy from previous array.
                    if (matrix_id < delta) {
                        av_log(s->avctx, AV_LOG_ERROR,
                               "Invalid delta in scaling list data: %d.\n", delta);
                        return AVERROR_INVALIDDATA;
                    }

                    memcpy(sl->sl[size_id][matrix_id],
                           sl->sl[size_id][matrix_id - delta],
                           size_id > 0 ? 64 : 16);
                    if (size_id > 1)
                        sl->sl_dc[size_id - 2][matrix_id] = sl->sl_dc[size_id - 2][matrix_id - delta];
                }
            } else {
                int next_coef, coef_num;
                int32_t scaling_list_delta_coef;

                next_coef = 8;
                coef_num  = FFMIN(64, 1 << (4 + (size_id << 1)));
                if (size_id > 1) {
                    scaling_list_dc_coef[size_id - 2][matrix_id] = get_se_golomb(gb) + 8;
                    next_coef = scaling_list_dc_coef[size_id - 2][matrix_id];
                    sl->sl_dc[size_id - 2][matrix_id] = next_coef;
                }
                for (i = 0; i < coef_num; i++) {
                    if (size_id == 0)
                        pos = 4 * ff_hevc_diag_scan4x4_y[i] +
                              ff_hevc_diag_scan4x4_x[i];
                    else
                        pos = 8 * ff_hevc_diag_scan8x8_y[i] +
                              ff_hevc_diag_scan8x8_x[i];

                    scaling_list_delta_coef = get_se_golomb(gb);
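                    /* Coefficients are coded as deltas from the previous one
                     * in up-right diagonal scan order and wrap around modulo
                     * 256 into the 0..255 range. */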
                    next_coef = (next_coef + scaling_list_delta_coef + 256) % 256;
                    sl->sl[size_id][matrix_id][pos] = next_coef;
                }
            }
        }

    return 0;
}

int ff_hevc_decode_nal_sps(HEVCContext *s)
{
    const AVPixFmtDescriptor *desc;
    GetBitContext *gb = &s->HEVClc.gb;
    int ret = 0;
    unsigned int sps_id = 0;
    int log2_diff_max_min_transform_block_size;
    int bit_depth_chroma, start, vui_present, sublayer_ordering_info;
    int i;

    HEVCSPS *sps;
    AVBufferRef *sps_buf = av_buffer_allocz(sizeof(*sps));

    if (!sps_buf)
        return AVERROR(ENOMEM);
    sps = (HEVCSPS*)sps_buf->data;

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding SPS\n");

    // Coded parameters
    sps->vps_id = get_bits(gb, 4);
    if (sps->vps_id >= MAX_VPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", sps->vps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (!s->vps_list[sps->vps_id]) {
        av_log(s->avctx, AV_LOG_ERROR, "VPS %d does not exist\n",
               sps->vps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sps->max_sub_layers = get_bits(gb, 3) + 1;
    if (sps->max_sub_layers > MAX_SUB_LAYERS) {
        av_log(s->avctx, AV_LOG_ERROR, "sps_max_sub_layers out of range: %d\n",
               sps->max_sub_layers);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    skip_bits1(gb); // temporal_id_nesting_flag

    parse_ptl(s, &sps->ptl, sps->max_sub_layers);

    sps_id = get_ue_golomb_long(gb);
    if (sps_id >= MAX_SPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", sps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sps->chroma_format_idc = get_ue_golomb_long(gb);
    if (sps->chroma_format_idc != 1) {
        avpriv_report_missing_feature(s->avctx, "chroma_format_idc != 1\n");
        ret = AVERROR_PATCHWELCOME;
        goto err;
    }

    if (sps->chroma_format_idc == 3)
        sps->separate_colour_plane_flag = get_bits1(gb);

    sps->width  = get_ue_golomb_long(gb);
    sps->height = get_ue_golomb_long(gb);
    if ((ret = av_image_check_size(sps->width,
                                   sps->height, 0, s->avctx)) < 0)
        goto err;

    if (get_bits1(gb)) { // pic_conformance_flag
        //TODO: * 2 is only valid for 420
        sps->pic_conf_win.left_offset   = get_ue_golomb_long(gb) * 2;
        sps->pic_conf_win.right_offset  = get_ue_golomb_long(gb) * 2;
        sps->pic_conf_win.top_offset    = get_ue_golomb_long(gb) * 2;
        sps->pic_conf_win.bottom_offset = get_ue_golomb_long(gb) * 2;

        if (s->avctx->flags2 & CODEC_FLAG2_IGNORE_CROP) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "discarding sps conformance window, "
                   "original values are l:%u r:%u t:%u b:%u\n",
                   sps->pic_conf_win.left_offset,
                   sps->pic_conf_win.right_offset,
                   sps->pic_conf_win.top_offset,
                   sps->pic_conf_win.bottom_offset);

            sps->pic_conf_win.left_offset   =
            sps->pic_conf_win.right_offset  =
            sps->pic_conf_win.top_offset    =
            sps->pic_conf_win.bottom_offset = 0;
        }
        sps->output_window = sps->pic_conf_win;
    }

    sps->bit_depth   = get_ue_golomb_long(gb) + 8;
    bit_depth_chroma = get_ue_golomb_long(gb) + 8;
    if (bit_depth_chroma != sps->bit_depth) {
        av_log(s->avctx, AV_LOG_ERROR,
               "Luma bit depth (%d) is different from chroma bit depth (%d), "
               "this is unsupported.\n",
               sps->bit_depth, bit_depth_chroma);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (sps->chroma_format_idc == 1) {
        switch (sps->bit_depth) {
        case 8:  sps->pix_fmt = AV_PIX_FMT_YUV420P;   break;
        case 9:  sps->pix_fmt = AV_PIX_FMT_YUV420P9;  break;
        case 10: sps->pix_fmt = AV_PIX_FMT_YUV420P10; break;
        default:
            av_log(s->avctx, AV_LOG_ERROR, "Unsupported bit depth: %d\n",
                   sps->bit_depth);
            ret = AVERROR_PATCHWELCOME;
            goto err;
        }
    } else {
        av_log(s->avctx, AV_LOG_ERROR,
               "non-4:2:0 support is currently unspecified.\n");
        ret = AVERROR_PATCHWELCOME;
        goto err;
    }

    desc = av_pix_fmt_desc_get(sps->pix_fmt);
    if (!desc) {
        ret = AVERROR(EINVAL);
        goto err;
    }

    sps->hshift[0] = sps->vshift[0] = 0;
    sps->hshift[2] = sps->hshift[1] = desc->log2_chroma_w;
    sps->vshift[2] = sps->vshift[1] = desc->log2_chroma_h;

    sps->pixel_shift = sps->bit_depth > 8;

    sps->log2_max_poc_lsb = get_ue_golomb_long(gb) + 4;
    if (sps->log2_max_poc_lsb > 16) {
        av_log(s->avctx, AV_LOG_ERROR, "log2_max_pic_order_cnt_lsb_minus4 out of range: %d\n",
               sps->log2_max_poc_lsb - 4);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    sublayer_ordering_info = get_bits1(gb);
    start = sublayer_ordering_info ? 0 : sps->max_sub_layers - 1;
    for (i = start; i < sps->max_sub_layers; i++) {
        sps->temporal_layer[i].max_dec_pic_buffering = get_ue_golomb_long(gb) + 1;
        sps->temporal_layer[i].num_reorder_pics      = get_ue_golomb_long(gb);
        sps->temporal_layer[i].max_latency_increase  = get_ue_golomb_long(gb) - 1;
        if (sps->temporal_layer[i].max_dec_pic_buffering > MAX_DPB_SIZE) {
            av_log(s->avctx, AV_LOG_ERROR, "sps_max_dec_pic_buffering_minus1 out of range: %d\n",
                   sps->temporal_layer[i].max_dec_pic_buffering - 1);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        if (sps->temporal_layer[i].num_reorder_pics > sps->temporal_layer[i].max_dec_pic_buffering - 1) {
            av_log(s->avctx, AV_LOG_ERROR, "sps_max_num_reorder_pics out of range: %d\n",
                   sps->temporal_layer[i].num_reorder_pics);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
    }
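
    /* When per-sub-layer ordering info was not coded, the values signalled
     * for the highest sub-layer apply to all the lower ones as well. */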
    if (!sublayer_ordering_info) {
        for (i = 0; i < start; i++) {
            sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[start].max_dec_pic_buffering;
            sps->temporal_layer[i].num_reorder_pics      = sps->temporal_layer[start].num_reorder_pics;
            sps->temporal_layer[i].max_latency_increase  = sps->temporal_layer[start].max_latency_increase;
        }
    }

    sps->log2_min_cb_size                    = get_ue_golomb_long(gb) + 3;
    sps->log2_diff_max_min_coding_block_size = get_ue_golomb_long(gb);
    sps->log2_min_tb_size                    = get_ue_golomb_long(gb) + 2;
    log2_diff_max_min_transform_block_size   = get_ue_golomb_long(gb);
    sps->log2_max_trafo_size                 = log2_diff_max_min_transform_block_size +
                                               sps->log2_min_tb_size;

    if (sps->log2_min_tb_size >= sps->log2_min_cb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid value for log2_min_tb_size\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    sps->max_transform_hierarchy_depth_inter = get_ue_golomb_long(gb);
    sps->max_transform_hierarchy_depth_intra = get_ue_golomb_long(gb);

    sps->scaling_list_enable_flag = get_bits1(gb);
    if (sps->scaling_list_enable_flag) {
        set_default_scaling_list_data(&sps->scaling_list);

        if (get_bits1(gb)) {
            ret = scaling_list_data(s, &sps->scaling_list);
            if (ret < 0)
                goto err;
        }
    }

    sps->amp_enabled_flag = get_bits1(gb);
    sps->sao_enabled      = get_bits1(gb);

    sps->pcm_enabled_flag = get_bits1(gb);
    if (sps->pcm_enabled_flag) {
        sps->pcm.bit_depth            = get_bits(gb, 4) + 1;
        sps->pcm.bit_depth_chroma     = get_bits(gb, 4) + 1;
        sps->pcm.log2_min_pcm_cb_size = get_ue_golomb_long(gb) + 3;
        sps->pcm.log2_max_pcm_cb_size = sps->pcm.log2_min_pcm_cb_size +
                                        get_ue_golomb_long(gb);
        if (sps->pcm.bit_depth > sps->bit_depth) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "PCM bit depth (%d) is greater than normal bit depth (%d)\n",
                   sps->pcm.bit_depth, sps->bit_depth);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }

        sps->pcm.loop_filter_disable_flag = get_bits1(gb);
    }

    sps->nb_st_rps = get_ue_golomb_long(gb);
    if (sps->nb_st_rps > MAX_SHORT_TERM_RPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "Too many short term RPS: %d.\n",
               sps->nb_st_rps);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    for (i = 0; i < sps->nb_st_rps; i++) {
        if ((ret = ff_hevc_decode_short_term_rps(s, &sps->st_rps[i],
                                                 sps, 0)) < 0)
            goto err;
    }

    sps->long_term_ref_pics_present_flag = get_bits1(gb);
    if (sps->long_term_ref_pics_present_flag) {
        sps->num_long_term_ref_pics_sps = get_ue_golomb_long(gb);
        for (i = 0; i < sps->num_long_term_ref_pics_sps; i++) {
            sps->lt_ref_pic_poc_lsb_sps[i]       = get_bits(gb, sps->log2_max_poc_lsb);
            sps->used_by_curr_pic_lt_sps_flag[i] = get_bits1(gb);
        }
    }

    sps->sps_temporal_mvp_enabled_flag          = get_bits1(gb);
    sps->sps_strong_intra_smoothing_enable_flag = get_bits1(gb);
    sps->vui.sar = (AVRational){0, 1};
    vui_present = get_bits1(gb);
    if (vui_present)
        decode_vui(s, sps);
    skip_bits1(gb); // sps_extension_flag

    if (s->apply_defdispwin) {
        sps->output_window.left_offset   += sps->vui.def_disp_win.left_offset;
        sps->output_window.right_offset  += sps->vui.def_disp_win.right_offset;
        sps->output_window.top_offset    += sps->vui.def_disp_win.top_offset;
        sps->output_window.bottom_offset += sps->vui.def_disp_win.bottom_offset;
    }
    if (sps->output_window.left_offset & (0x1F >> (sps->pixel_shift)) &&
        !(s->avctx->flags & CODEC_FLAG_UNALIGNED)) {
        sps->output_window.left_offset &= ~(0x1F >> (sps->pixel_shift));
        av_log(s->avctx, AV_LOG_WARNING, "Reducing left output window to %d "
               "chroma samples to preserve alignment.\n",
               sps->output_window.left_offset);
    }
    sps->output_width  = sps->width -
                         (sps->output_window.left_offset + sps->output_window.right_offset);
    sps->output_height = sps->height -
                         (sps->output_window.top_offset + sps->output_window.bottom_offset);
    if (sps->output_width <= 0 || sps->output_height <= 0) {
        av_log(s->avctx, AV_LOG_WARNING, "Invalid visible frame dimensions: %dx%d.\n",
               sps->output_width, sps->output_height);
        if (s->avctx->err_recognition & AV_EF_EXPLODE) {
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        av_log(s->avctx, AV_LOG_WARNING,
               "Displaying the whole video surface.\n");
        sps->pic_conf_win.left_offset   =
        sps->pic_conf_win.right_offset  =
        sps->pic_conf_win.top_offset    =
        sps->pic_conf_win.bottom_offset = 0;
        sps->output_width  = sps->width;
        sps->output_height = sps->height;
    }

    // Inferred parameters
    sps->log2_ctb_size = sps->log2_min_cb_size +
                         sps->log2_diff_max_min_coding_block_size;
    sps->log2_min_pu_size = sps->log2_min_cb_size - 1;

    sps->ctb_width  = (sps->width  + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
    sps->ctb_height = (sps->height + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
    sps->ctb_size   = sps->ctb_width * sps->ctb_height;

    sps->min_cb_width  = sps->width  >> sps->log2_min_cb_size;
    sps->min_cb_height = sps->height >> sps->log2_min_cb_size;
    sps->min_tb_width  = sps->width  >> sps->log2_min_tb_size;
    sps->min_tb_height = sps->height >> sps->log2_min_tb_size;
    sps->min_pu_width  = sps->width  >> sps->log2_min_pu_size;
    sps->min_pu_height = sps->height >> sps->log2_min_pu_size;

    sps->qp_bd_offset = 6 * (sps->bit_depth - 8);

    if (sps->width  & ((1 << sps->log2_min_cb_size) - 1) ||
        sps->height & ((1 << sps->log2_min_cb_size) - 1)) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid coded frame dimensions.\n");
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (sps->log2_ctb_size > MAX_LOG2_CTB_SIZE) {
        av_log(s->avctx, AV_LOG_ERROR, "CTB size out of range: 2^%d\n", sps->log2_ctb_size);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (sps->max_transform_hierarchy_depth_inter > sps->log2_ctb_size - sps->log2_min_tb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_inter out of range: %d\n",
               sps->max_transform_hierarchy_depth_inter);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if (sps->max_transform_hierarchy_depth_intra > sps->log2_ctb_size - sps->log2_min_tb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_intra out of range: %d\n",
               sps->max_transform_hierarchy_depth_intra);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if (sps->log2_max_trafo_size > FFMIN(sps->log2_ctb_size, 5)) {
        av_log(s->avctx, AV_LOG_ERROR,
               "max transform block size out of range: %d\n",
               sps->log2_max_trafo_size);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    if (s->avctx->debug & FF_DEBUG_BITSTREAM) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "Parsed SPS: id %d; coded wxh: %dx%d; "
               "cropped wxh: %dx%d; pix_fmt: %s.\n",
               sps_id, sps->width, sps->height,
               sps->output_width, sps->output_height,
               av_get_pix_fmt_name(sps->pix_fmt));
    }

    /* check if this is a repeat of an already parsed SPS, then keep the
     * original one.
     * otherwise drop all PPSes that depend on it */
    if (s->sps_list[sps_id] &&
        !memcmp(s->sps_list[sps_id]->data, sps_buf->data, sps_buf->size)) {
        av_buffer_unref(&sps_buf);
    } else {
        for (i = 0; i < FF_ARRAY_ELEMS(s->pps_list); i++) {
            if (s->pps_list[i] && ((HEVCPPS*)s->pps_list[i]->data)->sps_id == sps_id)
                av_buffer_unref(&s->pps_list[i]);
        }
        av_buffer_unref(&s->sps_list[sps_id]);
        s->sps_list[sps_id] = sps_buf;
    }

    return 0;

err:
    av_buffer_unref(&sps_buf);
    return ret;
}

static void hevc_pps_free(void *opaque, uint8_t *data)
{
    HEVCPPS *pps = (HEVCPPS*)data;

    av_freep(&pps->column_width);
    av_freep(&pps->row_height);
    av_freep(&pps->col_bd);
    av_freep(&pps->row_bd);
    av_freep(&pps->col_idxX);
    av_freep(&pps->ctb_addr_rs_to_ts);
    av_freep(&pps->ctb_addr_ts_to_rs);
    av_freep(&pps->tile_pos_rs);
    av_freep(&pps->tile_id);
    av_freep(&pps->min_cb_addr_zs);
    av_freep(&pps->min_tb_addr_zs);

    av_freep(&pps);
}

int ff_hevc_decode_nal_pps(HEVCContext *s)
{
    GetBitContext *gb = &s->HEVClc.gb;
    HEVCSPS *sps = NULL;
    int pic_area_in_ctbs, pic_area_in_min_cbs, pic_area_in_min_tbs;
    int log2_diff_ctb_min_tb_size;
    int i, j, x, y, ctb_addr_rs, tile_id;
    int ret = 0;
    unsigned int pps_id = 0;

    AVBufferRef *pps_buf;
    HEVCPPS *pps = av_mallocz(sizeof(*pps));

    if (!pps)
        return AVERROR(ENOMEM);

    pps_buf = av_buffer_create((uint8_t *)pps, sizeof(*pps),
                               hevc_pps_free, NULL, 0);
    if (!pps_buf) {
        av_freep(&pps);
        return AVERROR(ENOMEM);
    }

    av_log(s->avctx, AV_LOG_DEBUG, "Decoding PPS\n");

    // Default values
    pps->loop_filter_across_tiles_enabled_flag = 1;
    pps->num_tile_columns                      = 1;
    pps->num_tile_rows                         = 1;
    pps->uniform_spacing_flag                  = 1;
    pps->disable_dbf                           = 0;
    pps->beta_offset                           = 0;
    pps->tc_offset                             = 0;

    // Coded parameters
    pps_id = get_ue_golomb_long(gb);
    if (pps_id >= MAX_PPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "PPS id out of range: %d\n", pps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    pps->sps_id = get_ue_golomb_long(gb);
    if (pps->sps_id >= MAX_SPS_COUNT) {
        av_log(s->avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", pps->sps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    if (!s->sps_list[pps->sps_id]) {
        av_log(s->avctx, AV_LOG_ERROR, "SPS %u does not exist.\n", pps->sps_id);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    sps = (HEVCSPS *)s->sps_list[pps->sps_id]->data;

    pps->dependent_slice_segments_enabled_flag = get_bits1(gb);
    pps->output_flag_present_flag              = get_bits1(gb);
    pps->num_extra_slice_header_bits           = get_bits(gb, 3);

    pps->sign_data_hiding_flag = get_bits1(gb);

    pps->cabac_init_present_flag = get_bits1(gb);

    pps->num_ref_idx_l0_default_active = get_ue_golomb_long(gb) + 1;
    pps->num_ref_idx_l1_default_active = get_ue_golomb_long(gb) + 1;

    pps->pic_init_qp_minus26 = get_se_golomb(gb);

    pps->constrained_intra_pred_flag = get_bits1(gb);
    pps->transform_skip_enabled_flag = get_bits1(gb);

    pps->cu_qp_delta_enabled_flag = get_bits1(gb);
    pps->diff_cu_qp_delta_depth   = 0;
    if (pps->cu_qp_delta_enabled_flag)
        pps->diff_cu_qp_delta_depth = get_ue_golomb_long(gb);

    pps->cb_qp_offset = get_se_golomb(gb);
    if (pps->cb_qp_offset < -12 || pps->cb_qp_offset > 12) {
        av_log(s->avctx, AV_LOG_ERROR, "pps_cb_qp_offset out of range: %d\n",
               pps->cb_qp_offset);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    pps->cr_qp_offset = get_se_golomb(gb);
    if (pps->cr_qp_offset < -12 || pps->cr_qp_offset > 12) {
        av_log(s->avctx, AV_LOG_ERROR, "pps_cr_qp_offset out of range: %d\n",
               pps->cr_qp_offset);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }
    pps->pic_slice_level_chroma_qp_offsets_present_flag = get_bits1(gb);

    pps->weighted_pred_flag   = get_bits1(gb);
    pps->weighted_bipred_flag = get_bits1(gb);

    pps->transquant_bypass_enable_flag    = get_bits1(gb);
    pps->tiles_enabled_flag               = get_bits1(gb);
    pps->entropy_coding_sync_enabled_flag = get_bits1(gb);

    if (pps->tiles_enabled_flag) {
        pps->num_tile_columns = get_ue_golomb_long(gb) + 1;
        pps->num_tile_rows    = get_ue_golomb_long(gb) + 1;
        if (pps->num_tile_columns == 0 ||
            pps->num_tile_columns >= sps->width) {
            av_log(s->avctx, AV_LOG_ERROR, "num_tile_columns_minus1 out of range: %d\n",
                   pps->num_tile_columns - 1);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }
        if (pps->num_tile_rows == 0 ||
            pps->num_tile_rows >= sps->height) {
            av_log(s->avctx, AV_LOG_ERROR, "num_tile_rows_minus1 out of range: %d\n",
                   pps->num_tile_rows - 1);
            ret = AVERROR_INVALIDDATA;
            goto err;
        }

        pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
        pps->row_height   = av_malloc_array(pps->num_tile_rows,    sizeof(*pps->row_height));
        if (!pps->column_width || !pps->row_height) {
            ret = AVERROR(ENOMEM);
            goto err;
        }

        pps->uniform_spacing_flag = get_bits1(gb);
        if (!pps->uniform_spacing_flag) {
            uint64_t sum = 0;
            for (i = 0; i < pps->num_tile_columns - 1; i++) {
                pps->column_width[i] = get_ue_golomb_long(gb) + 1;
                sum                 += pps->column_width[i];
            }
            if (sum >= sps->ctb_width) {
                av_log(s->avctx, AV_LOG_ERROR, "Invalid tile widths.\n");
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            pps->column_width[pps->num_tile_columns - 1] = sps->ctb_width - sum;

            sum = 0;
            for (i = 0; i < pps->num_tile_rows - 1; i++) {
                pps->row_height[i] = get_ue_golomb_long(gb) + 1;
                sum               += pps->row_height[i];
            }
            if (sum >= sps->ctb_height) {
                av_log(s->avctx, AV_LOG_ERROR, "Invalid tile heights.\n");
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            pps->row_height[pps->num_tile_rows - 1] = sps->ctb_height - sum;
        }
        pps->loop_filter_across_tiles_enabled_flag = get_bits1(gb);
    }

    pps->seq_loop_filter_across_slices_enabled_flag = get_bits1(gb);

    pps->deblocking_filter_control_present_flag = get_bits1(gb);
    if (pps->deblocking_filter_control_present_flag) {
        pps->deblocking_filter_override_enabled_flag = get_bits1(gb);
        pps->disable_dbf                             = get_bits1(gb);
        if (!pps->disable_dbf) {
            pps->beta_offset = get_se_golomb(gb) * 2;
            pps->tc_offset   = get_se_golomb(gb) * 2;
            if (pps->beta_offset / 2 < -6 || pps->beta_offset / 2 > 6) {
                av_log(s->avctx, AV_LOG_ERROR, "pps_beta_offset_div2 out of range: %d\n",
                       pps->beta_offset / 2);
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
            if (pps->tc_offset / 2 < -6 || pps->tc_offset / 2 > 6) {
                av_log(s->avctx, AV_LOG_ERROR, "pps_tc_offset_div2 out of range: %d\n",
                       pps->tc_offset / 2);
                ret = AVERROR_INVALIDDATA;
                goto err;
            }
        }
    }

    pps->scaling_list_data_present_flag = get_bits1(gb);
    if (pps->scaling_list_data_present_flag) {
        set_default_scaling_list_data(&pps->scaling_list);
        ret = scaling_list_data(s, &pps->scaling_list);
        if (ret < 0)
            goto err;
    }
    pps->lists_modification_present_flag = get_bits1(gb);
    pps->log2_parallel_merge_level       = get_ue_golomb_long(gb) + 2;
    if (pps->log2_parallel_merge_level > sps->log2_ctb_size) {
        av_log(s->avctx, AV_LOG_ERROR, "log2_parallel_merge_level_minus2 out of range: %d\n",
               pps->log2_parallel_merge_level - 2);
        ret = AVERROR_INVALIDDATA;
        goto err;
    }

    pps->slice_header_extension_present_flag = get_bits1(gb);
    skip_bits1(gb); // pps_extension_flag

    // Inferred parameters
    pps->col_bd   = av_malloc_array(pps->num_tile_columns + 1, sizeof(*pps->col_bd));
    pps->row_bd   = av_malloc_array(pps->num_tile_rows + 1,    sizeof(*pps->row_bd));
    pps->col_idxX = av_malloc_array(sps->ctb_width,            sizeof(*pps->col_idxX));
    if (!pps->col_bd || !pps->row_bd || !pps->col_idxX) {
        ret = AVERROR(ENOMEM);
        goto err;
    }

    if (pps->uniform_spacing_flag) {
        if (!pps->column_width) {
            pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
            pps->row_height   = av_malloc_array(pps->num_tile_rows,    sizeof(*pps->row_height));
        }
        if (!pps->column_width || !pps->row_height) {
            ret = AVERROR(ENOMEM);
            goto err;
        }
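
        /* Uniform spacing: distribute the CTB columns and rows as evenly as
         * possible among the tiles, using the integer division formula from
         * the specification. */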
        for (i = 0; i < pps->num_tile_columns; i++) {
            pps->column_width[i] = ((i + 1) * sps->ctb_width) / pps->num_tile_columns -
                                   (i * sps->ctb_width) / pps->num_tile_columns;
        }

        for (i = 0; i < pps->num_tile_rows; i++) {
            pps->row_height[i] = ((i + 1) * sps->ctb_height) / pps->num_tile_rows -
                                 (i * sps->ctb_height) / pps->num_tile_rows;
        }
    }

    pps->col_bd[0] = 0;
    for (i = 0; i < pps->num_tile_columns; i++)
        pps->col_bd[i + 1] = pps->col_bd[i] + pps->column_width[i];

    pps->row_bd[0] = 0;
    for (i = 0; i < pps->num_tile_rows; i++)
        pps->row_bd[i + 1] = pps->row_bd[i] + pps->row_height[i];

    for (i = 0, j = 0; i < sps->ctb_width; i++) {
        if (i > pps->col_bd[j])
            j++;
        pps->col_idxX[i] = j;
    }

    /* Scan conversion tables, see clause 6.5 of the HEVC specification. */
    pic_area_in_ctbs    = sps->ctb_width    * sps->ctb_height;
    pic_area_in_min_cbs = sps->min_cb_width * sps->min_cb_height;
    pic_area_in_min_tbs = sps->min_tb_width * sps->min_tb_height;

    pps->ctb_addr_rs_to_ts = av_malloc_array(pic_area_in_ctbs,    sizeof(*pps->ctb_addr_rs_to_ts));
    pps->ctb_addr_ts_to_rs = av_malloc_array(pic_area_in_ctbs,    sizeof(*pps->ctb_addr_ts_to_rs));
    pps->tile_id           = av_malloc_array(pic_area_in_ctbs,    sizeof(*pps->tile_id));
    pps->min_cb_addr_zs    = av_malloc_array(pic_area_in_min_cbs, sizeof(*pps->min_cb_addr_zs));
    pps->min_tb_addr_zs    = av_malloc_array(pic_area_in_min_tbs, sizeof(*pps->min_tb_addr_zs));
    if (!pps->ctb_addr_rs_to_ts || !pps->ctb_addr_ts_to_rs ||
        !pps->tile_id || !pps->min_cb_addr_zs || !pps->min_tb_addr_zs) {
        ret = AVERROR(ENOMEM);
        goto err;
    }
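
    /* Build the conversion between CTB addresses in raster scan order and in
     * tile scan order: within each tile CTBs are stored consecutively, so the
     * tile-scan address of a CTB is the size of all preceding tiles plus its
     * offset inside its own tile. */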
    for (ctb_addr_rs = 0; ctb_addr_rs < pic_area_in_ctbs; ctb_addr_rs++) {
        int tb_x   = ctb_addr_rs % sps->ctb_width;
        int tb_y   = ctb_addr_rs / sps->ctb_width;
        int tile_x = 0;
        int tile_y = 0;
        int val    = 0;

        for (i = 0; i < pps->num_tile_columns; i++) {
            if (tb_x < pps->col_bd[i + 1]) {
                tile_x = i;
                break;
            }
        }

        for (i = 0; i < pps->num_tile_rows; i++) {
            if (tb_y < pps->row_bd[i + 1]) {
                tile_y = i;
                break;
            }
        }

        for (i = 0; i < tile_x; i++)
            val += pps->row_height[tile_y] * pps->column_width[i];
        for (i = 0; i < tile_y; i++)
            val += sps->ctb_width * pps->row_height[i];

        val += (tb_y - pps->row_bd[tile_y]) * pps->column_width[tile_x] +
               tb_x - pps->col_bd[tile_x];

        pps->ctb_addr_rs_to_ts[ctb_addr_rs] = val;
        pps->ctb_addr_ts_to_rs[val]         = ctb_addr_rs;
    }

    for (j = 0, tile_id = 0; j < pps->num_tile_rows; j++)
        for (i = 0; i < pps->num_tile_columns; i++, tile_id++)
            for (y = pps->row_bd[j]; y < pps->row_bd[j + 1]; y++)
                for (x = pps->col_bd[i]; x < pps->col_bd[i + 1]; x++)
                    pps->tile_id[pps->ctb_addr_rs_to_ts[y * sps->ctb_width + x]] = tile_id;

    pps->tile_pos_rs = av_malloc_array(tile_id, sizeof(*pps->tile_pos_rs));
    if (!pps->tile_pos_rs) {
        ret = AVERROR(ENOMEM);
        goto err;
    }

    for (j = 0; j < pps->num_tile_rows; j++)
        for (i = 0; i < pps->num_tile_columns; i++)
            pps->tile_pos_rs[j * pps->num_tile_columns + i] = pps->row_bd[j] * sps->ctb_width + pps->col_bd[i];
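
    /* Z-scan order addresses for the minimum coding and transform blocks:
     * start from the tile-scan address of the containing CTB and interleave
     * the x/y bits of the position inside the CTB. */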
    for (y = 0; y < sps->min_cb_height; y++) {
        for (x = 0; x < sps->min_cb_width; x++) {
            int tb_x = x >> sps->log2_diff_max_min_coding_block_size;
            int tb_y = y >> sps->log2_diff_max_min_coding_block_size;
            int ctb_addr_rs = sps->ctb_width * tb_y + tb_x;
            int val = pps->ctb_addr_rs_to_ts[ctb_addr_rs] <<
                      (sps->log2_diff_max_min_coding_block_size * 2);
            for (i = 0; i < sps->log2_diff_max_min_coding_block_size; i++) {
                int m = 1 << i;
                val += (m & x ? m * m : 0) + (m & y ? 2 * m * m : 0);
            }
            pps->min_cb_addr_zs[y * sps->min_cb_width + x] = val;
        }
    }

    log2_diff_ctb_min_tb_size = sps->log2_ctb_size - sps->log2_min_tb_size;
    for (y = 0; y < sps->min_tb_height; y++) {
        for (x = 0; x < sps->min_tb_width; x++) {
            int tb_x = x >> log2_diff_ctb_min_tb_size;
            int tb_y = y >> log2_diff_ctb_min_tb_size;
            int ctb_addr_rs = sps->ctb_width * tb_y + tb_x;
            int val = pps->ctb_addr_rs_to_ts[ctb_addr_rs] <<
                      (log2_diff_ctb_min_tb_size * 2);
            for (i = 0; i < log2_diff_ctb_min_tb_size; i++) {
                int m = 1 << i;
                val += (m & x ? m * m : 0) + (m & y ? 2 * m * m : 0);
            }
            pps->min_tb_addr_zs[y * sps->min_tb_width + x] = val;
        }
    }

    av_buffer_unref(&s->pps_list[pps_id]);
    s->pps_list[pps_id] = pps_buf;

    return 0;

err:
    av_buffer_unref(&pps_buf);
    return ret;
}