  1. /*
  2. * HEVC Parameter Set decoding
  3. *
  4. * Copyright (C) 2012 - 2013 Guillaume Martres
  5. * Copyright (C) 2012 - 2013 Mickael Raulet
  6. * Copyright (C) 2012 - 2013 Gildas Cocherel
  7. * Copyright (C) 2013 Vittorio Giovara
  8. *
  9. * This file is part of Libav.
  10. *
  11. * Libav is free software; you can redistribute it and/or
  12. * modify it under the terms of the GNU Lesser General Public
  13. * License as published by the Free Software Foundation; either
  14. * version 2.1 of the License, or (at your option) any later version.
  15. *
  16. * Libav is distributed in the hope that it will be useful,
  17. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  19. * Lesser General Public License for more details.
  20. *
  21. * You should have received a copy of the GNU Lesser General Public
  22. * License along with Libav; if not, write to the Free Software
  23. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  24. */
  25. #include "libavutil/imgutils.h"
  26. #include "golomb_legacy.h"
  27. #include "hevc_data.h"
  28. #include "hevc_ps.h"
  29. static const uint8_t default_scaling_list_intra[] = {
  30. 16, 16, 16, 16, 17, 18, 21, 24,
  31. 16, 16, 16, 16, 17, 19, 22, 25,
  32. 16, 16, 17, 18, 20, 22, 25, 29,
  33. 16, 16, 18, 21, 24, 27, 31, 36,
  34. 17, 17, 20, 24, 30, 35, 41, 47,
  35. 18, 19, 22, 27, 35, 44, 54, 65,
  36. 21, 22, 25, 31, 41, 54, 70, 88,
  37. 24, 25, 29, 36, 47, 65, 88, 115
  38. };
  39. static const uint8_t default_scaling_list_inter[] = {
  40. 16, 16, 16, 16, 17, 18, 20, 24,
  41. 16, 16, 16, 17, 18, 20, 24, 25,
  42. 16, 16, 17, 18, 20, 24, 25, 28,
  43. 16, 17, 18, 20, 24, 25, 28, 33,
  44. 17, 18, 20, 24, 25, 28, 33, 41,
  45. 18, 20, 24, 25, 28, 33, 41, 54,
  46. 20, 24, 25, 28, 33, 41, 54, 71,
  47. 24, 25, 28, 33, 41, 54, 71, 91
  48. };
  49. static const AVRational vui_sar[] = {
  50. { 0, 1 },
  51. { 1, 1 },
  52. { 12, 11 },
  53. { 10, 11 },
  54. { 16, 11 },
  55. { 40, 33 },
  56. { 24, 11 },
  57. { 20, 11 },
  58. { 32, 11 },
  59. { 80, 33 },
  60. { 18, 11 },
  61. { 15, 11 },
  62. { 64, 33 },
  63. { 160, 99 },
  64. { 4, 3 },
  65. { 3, 2 },
  66. { 2, 1 },
  67. };
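/* Indices 0-16 above follow the sample aspect ratio table of HEVC Annex E
 * (Table E-1); index 0 means "unspecified". Index 255 (EXTENDED_SAR) is not
 * in this table: decode_vui() handles it by reading an explicit 16-bit
 * numerator and denominator from the bitstream. */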
  68. static void remove_pps(HEVCParamSets *s, int id)
  69. {
  70. if (s->pps_list[id] && s->pps == (const HEVCPPS*)s->pps_list[id]->data)
  71. s->pps = NULL;
  72. av_buffer_unref(&s->pps_list[id]);
  73. }
  74. static void remove_sps(HEVCParamSets *s, int id)
  75. {
  76. int i;
  77. if (s->sps_list[id]) {
  78. if (s->sps == (const HEVCSPS*)s->sps_list[id]->data)
  79. s->sps = NULL;
  80. /* drop all PPS that depend on this SPS */
  81. for (i = 0; i < FF_ARRAY_ELEMS(s->pps_list); i++)
  82. if (s->pps_list[i] && ((HEVCPPS*)s->pps_list[i]->data)->sps_id == id)
  83. remove_pps(s, i);
  84. }
  85. av_buffer_unref(&s->sps_list[id]);
  86. }
  87. static void remove_vps(HEVCParamSets *s, int id)
  88. {
  89. int i;
  90. if (s->vps_list[id]) {
  91. if (s->vps == (const HEVCVPS*)s->vps_list[id]->data)
  92. s->vps = NULL;
  93. for (i = 0; i < FF_ARRAY_ELEMS(s->sps_list); i++)
  94. if (s->sps_list[i] && ((HEVCSPS*)s->sps_list[i]->data)->vps_id == id)
  95. remove_sps(s, i);
  96. }
  97. av_buffer_unref(&s->vps_list[id]);
  98. }
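/* Removal cascades down the dependency chain: remove_vps() drops every SPS
 * whose vps_id references the removed VPS, and remove_sps() in turn drops
 * every PPS whose sps_id references the removed SPS. A currently active
 * vps/sps/pps pointer into a removed buffer is cleared before the unref. */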
  99. int ff_hevc_decode_short_term_rps(GetBitContext *gb, AVCodecContext *avctx,
  100. ShortTermRPS *rps, const HEVCSPS *sps, int is_slice_header)
  101. {
  102. uint8_t rps_predict = 0;
  103. int delta_poc;
  104. int k0 = 0;
  105. int k1 = 0;
  106. int k = 0;
  107. int i;
  108. if (rps != sps->st_rps && sps->nb_st_rps)
  109. rps_predict = get_bits1(gb);
  110. if (rps_predict) {
  111. const ShortTermRPS *rps_ridx;
  112. int delta_rps, abs_delta_rps;
  113. uint8_t use_delta_flag = 0;
  114. uint8_t delta_rps_sign;
  115. if (is_slice_header) {
  116. unsigned int delta_idx = get_ue_golomb_long(gb) + 1;
  117. if (delta_idx > sps->nb_st_rps) {
  118. av_log(avctx, AV_LOG_ERROR,
  119. "Invalid value of delta_idx in slice header RPS: %d > %d.\n",
  120. delta_idx, sps->nb_st_rps);
  121. return AVERROR_INVALIDDATA;
  122. }
  123. rps_ridx = &sps->st_rps[sps->nb_st_rps - delta_idx];
  124. rps->rps_idx_num_delta_pocs = rps_ridx->num_delta_pocs;
  125. } else
  126. rps_ridx = &sps->st_rps[rps - sps->st_rps - 1];
  127. delta_rps_sign = get_bits1(gb);
  128. abs_delta_rps = get_ue_golomb_long(gb) + 1;
  129. delta_rps = (1 - (delta_rps_sign << 1)) * abs_delta_rps;
  130. for (i = 0; i <= rps_ridx->num_delta_pocs; i++) {
  131. int used = rps->used[k] = get_bits1(gb);
  132. if (!used)
  133. use_delta_flag = get_bits1(gb);
  134. if (used || use_delta_flag) {
  135. if (i < rps_ridx->num_delta_pocs)
  136. delta_poc = delta_rps + rps_ridx->delta_poc[i];
  137. else
  138. delta_poc = delta_rps;
  139. rps->delta_poc[k] = delta_poc;
  140. if (delta_poc < 0)
  141. k0++;
  142. else
  143. k1++;
  144. k++;
  145. }
  146. }
  147. rps->num_delta_pocs = k;
  148. rps->num_negative_pics = k0;
  149. // sort in increasing order (smallest first)
  150. if (rps->num_delta_pocs != 0) {
  151. int used, tmp;
  152. for (i = 1; i < rps->num_delta_pocs; i++) {
  153. delta_poc = rps->delta_poc[i];
  154. used = rps->used[i];
  155. for (k = i - 1; k >= 0; k--) {
  156. tmp = rps->delta_poc[k];
  157. if (delta_poc < tmp) {
  158. rps->delta_poc[k + 1] = tmp;
  159. rps->used[k + 1] = rps->used[k];
  160. rps->delta_poc[k] = delta_poc;
  161. rps->used[k] = used;
  162. }
  163. }
  164. }
  165. }
  166. if ((rps->num_negative_pics >> 1) != 0) {
  167. int used;
  168. k = rps->num_negative_pics - 1;
  169. // flip the negative values to largest first
  170. for (i = 0; i < rps->num_negative_pics >> 1; i++) {
  171. delta_poc = rps->delta_poc[i];
  172. used = rps->used[i];
  173. rps->delta_poc[i] = rps->delta_poc[k];
  174. rps->used[i] = rps->used[k];
  175. rps->delta_poc[k] = delta_poc;
  176. rps->used[k] = used;
  177. k--;
  178. }
  179. }
  180. } else {
  181. unsigned int prev, nb_positive_pics;
  182. rps->num_negative_pics = get_ue_golomb_long(gb);
  183. nb_positive_pics = get_ue_golomb_long(gb);
  184. if (rps->num_negative_pics >= HEVC_MAX_REFS ||
  185. nb_positive_pics >= HEVC_MAX_REFS) {
  186. av_log(avctx, AV_LOG_ERROR, "Too many refs in a short term RPS.\n");
  187. return AVERROR_INVALIDDATA;
  188. }
  189. rps->num_delta_pocs = rps->num_negative_pics + nb_positive_pics;
  190. if (rps->num_delta_pocs) {
  191. prev = 0;
  192. for (i = 0; i < rps->num_negative_pics; i++) {
  193. delta_poc = get_ue_golomb_long(gb) + 1;
  194. prev -= delta_poc;
  195. rps->delta_poc[i] = prev;
  196. rps->used[i] = get_bits1(gb);
  197. }
  198. prev = 0;
  199. for (i = 0; i < nb_positive_pics; i++) {
  200. delta_poc = get_ue_golomb_long(gb) + 1;
  201. prev += delta_poc;
  202. rps->delta_poc[rps->num_negative_pics + i] = prev;
  203. rps->used[rps->num_negative_pics + i] = get_bits1(gb);
  204. }
  205. }
  206. }
  207. return 0;
  208. }
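/* The two branches above correspond to the two RPS coding modes: with
 * rps_predict set (inter_ref_pic_set_prediction_flag in the spec) the set is
 * derived from a previously decoded RPS shifted by delta_rps; otherwise
 * num_negative_pics/nb_positive_pics and the POC deltas are coded explicitly.
 * Either way delta_poc[] ends up with the negative entries first, ordered by
 * increasing magnitude, followed by the positive entries. */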
  209. static void decode_profile_tier_level(GetBitContext *gb, AVCodecContext *avctx,
  210. PTLCommon *ptl)
  211. {
  212. int i;
  213. ptl->profile_space = get_bits(gb, 2);
  214. ptl->tier_flag = get_bits1(gb);
  215. ptl->profile_idc = get_bits(gb, 5);
  216. if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN)
  217. av_log(avctx, AV_LOG_DEBUG, "Main profile bitstream\n");
  218. else if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN_10)
  219. av_log(avctx, AV_LOG_DEBUG, "Main 10 profile bitstream\n");
  220. else if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN_STILL_PICTURE)
  221. av_log(avctx, AV_LOG_DEBUG, "Main Still Picture profile bitstream\n");
  222. else
  223. av_log(avctx, AV_LOG_WARNING, "Unknown HEVC profile: %d\n", ptl->profile_idc);
  224. for (i = 0; i < 32; i++) {
  225. ptl->profile_compatibility_flag[i] = get_bits1(gb);
  226. if (ptl->profile_idc == 0 && i > 0 && ptl->profile_compatibility_flag[i])
  227. ptl->profile_idc = i;
  228. }
  229. ptl->progressive_source_flag = get_bits1(gb);
  230. ptl->interlaced_source_flag = get_bits1(gb);
  231. ptl->non_packed_constraint_flag = get_bits1(gb);
  232. ptl->frame_only_constraint_flag = get_bits1(gb);
  233. skip_bits(gb, 16); // XXX_reserved_zero_44bits[0..15]
  234. skip_bits(gb, 16); // XXX_reserved_zero_44bits[16..31]
  235. skip_bits(gb, 12); // XXX_reserved_zero_44bits[32..43]
  236. }
  237. static void parse_ptl(GetBitContext *gb, AVCodecContext *avctx,
  238. PTL *ptl, int max_num_sub_layers)
  239. {
  240. int i;
  241. decode_profile_tier_level(gb, avctx, &ptl->general_ptl);
  242. ptl->general_ptl.level_idc = get_bits(gb, 8);
  243. for (i = 0; i < max_num_sub_layers - 1; i++) {
  244. ptl->sub_layer_profile_present_flag[i] = get_bits1(gb);
  245. ptl->sub_layer_level_present_flag[i] = get_bits1(gb);
  246. }
  247. if (max_num_sub_layers - 1 > 0)
  248. for (i = max_num_sub_layers - 1; i < 8; i++)
  249. skip_bits(gb, 2); // reserved_zero_2bits[i]
  250. for (i = 0; i < max_num_sub_layers - 1; i++) {
  251. if (ptl->sub_layer_profile_present_flag[i])
  252. decode_profile_tier_level(gb, avctx, &ptl->sub_layer_ptl[i]);
  253. if (ptl->sub_layer_level_present_flag[i])
  254. ptl->sub_layer_ptl[i].level_idc = get_bits(gb, 8);
  255. }
  256. }
  257. static void decode_sublayer_hrd(GetBitContext *gb, unsigned int nb_cpb,
  258. int subpic_params_present)
  259. {
  260. int i;
  261. for (i = 0; i < nb_cpb; i++) {
  262. get_ue_golomb_long(gb); // bit_rate_value_minus1
  263. get_ue_golomb_long(gb); // cpb_size_value_minus1
  264. if (subpic_params_present) {
  265. get_ue_golomb_long(gb); // cpb_size_du_value_minus1
  266. get_ue_golomb_long(gb); // bit_rate_du_value_minus1
  267. }
  268. skip_bits1(gb); // cbr_flag
  269. }
  270. }
  271. static void decode_hrd(GetBitContext *gb, int common_inf_present,
  272. int max_sublayers)
  273. {
  274. int nal_params_present = 0, vcl_params_present = 0;
  275. int subpic_params_present = 0;
  276. int i;
  277. if (common_inf_present) {
  278. nal_params_present = get_bits1(gb);
  279. vcl_params_present = get_bits1(gb);
  280. if (nal_params_present || vcl_params_present) {
  281. subpic_params_present = get_bits1(gb);
  282. if (subpic_params_present) {
  283. skip_bits(gb, 8); // tick_divisor_minus2
  284. skip_bits(gb, 5); // du_cpb_removal_delay_increment_length_minus1
  285. skip_bits(gb, 1); // sub_pic_cpb_params_in_pic_timing_sei_flag
  286. skip_bits(gb, 5); // dpb_output_delay_du_length_minus1
  287. }
  288. skip_bits(gb, 4); // bit_rate_scale
  289. skip_bits(gb, 4); // cpb_size_scale
  290. if (subpic_params_present)
  291. skip_bits(gb, 4); // cpb_size_du_scale
  292. skip_bits(gb, 5); // initial_cpb_removal_delay_length_minus1
  293. skip_bits(gb, 5); // au_cpb_removal_delay_length_minus1
  294. skip_bits(gb, 5); // dpb_output_delay_length_minus1
  295. }
  296. }
  297. for (i = 0; i < max_sublayers; i++) {
  298. int low_delay = 0;
  299. unsigned int nb_cpb = 1;
  300. int fixed_rate = get_bits1(gb);
  301. if (!fixed_rate)
  302. fixed_rate = get_bits1(gb);
  303. if (fixed_rate)
  304. get_ue_golomb_long(gb); // elemental_duration_in_tc_minus1
  305. else
  306. low_delay = get_bits1(gb);
  307. if (!low_delay)
  308. nb_cpb = get_ue_golomb_long(gb) + 1;
  309. if (nal_params_present)
  310. decode_sublayer_hrd(gb, nb_cpb, subpic_params_present);
  311. if (vcl_params_present)
  312. decode_sublayer_hrd(gb, nb_cpb, subpic_params_present);
  313. }
  314. }
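/* decode_hrd() and decode_sublayer_hrd() only advance the bitstream position
 * over the HRD syntax; none of the timing/CPB values are stored, the fields
 * are parsed purely to keep the reader in sync. */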
  315. int ff_hevc_decode_nal_vps(GetBitContext *gb, AVCodecContext *avctx,
  316. HEVCParamSets *ps)
  317. {
  318. int i,j;
  319. int vps_id = 0;
  320. HEVCVPS *vps;
  321. AVBufferRef *vps_buf = av_buffer_allocz(sizeof(*vps));
  322. if (!vps_buf)
  323. return AVERROR(ENOMEM);
  324. vps = (HEVCVPS*)vps_buf->data;
  325. av_log(avctx, AV_LOG_DEBUG, "Decoding VPS\n");
  326. vps_id = get_bits(gb, 4);
  327. if (vps_id >= HEVC_MAX_VPS_COUNT) {
  328. av_log(avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", vps_id);
  329. goto err;
  330. }
  331. if (get_bits(gb, 2) != 3) { // vps_reserved_three_2bits
  332. av_log(avctx, AV_LOG_ERROR, "vps_reserved_three_2bits is not three\n");
  333. goto err;
  334. }
  335. vps->vps_max_layers = get_bits(gb, 6) + 1;
  336. vps->vps_max_sub_layers = get_bits(gb, 3) + 1;
  337. vps->vps_temporal_id_nesting_flag = get_bits1(gb);
  338. if (get_bits(gb, 16) != 0xffff) { // vps_reserved_ffff_16bits
  339. av_log(avctx, AV_LOG_ERROR, "vps_reserved_ffff_16bits is not 0xffff\n");
  340. goto err;
  341. }
  342. if (vps->vps_max_sub_layers > HEVC_MAX_SUB_LAYERS) {
  343. av_log(avctx, AV_LOG_ERROR, "vps_max_sub_layers out of range: %d\n",
  344. vps->vps_max_sub_layers);
  345. goto err;
  346. }
  347. parse_ptl(gb, avctx, &vps->ptl, vps->vps_max_sub_layers);
  348. vps->vps_sub_layer_ordering_info_present_flag = get_bits1(gb);
  349. i = vps->vps_sub_layer_ordering_info_present_flag ? 0 : vps->vps_max_sub_layers - 1;
  350. for (; i < vps->vps_max_sub_layers; i++) {
  351. vps->vps_max_dec_pic_buffering[i] = get_ue_golomb_long(gb) + 1;
  352. vps->vps_num_reorder_pics[i] = get_ue_golomb_long(gb);
  353. vps->vps_max_latency_increase[i] = get_ue_golomb_long(gb) - 1;
  354. if (vps->vps_max_dec_pic_buffering[i] > HEVC_MAX_DPB_SIZE) {
  355. av_log(avctx, AV_LOG_ERROR, "vps_max_dec_pic_buffering_minus1 out of range: %d\n",
  356. vps->vps_max_dec_pic_buffering[i] - 1);
  357. goto err;
  358. }
  359. if (vps->vps_num_reorder_pics[i] > vps->vps_max_dec_pic_buffering[i] - 1) {
  360. av_log(avctx, AV_LOG_WARNING, "vps_max_num_reorder_pics out of range: %d\n",
  361. vps->vps_num_reorder_pics[i]);
  362. if (avctx->err_recognition & AV_EF_EXPLODE)
  363. goto err;
  364. }
  365. }
  366. vps->vps_max_layer_id = get_bits(gb, 6);
  367. vps->vps_num_layer_sets = get_ue_golomb_long(gb) + 1;
  368. for (i = 1; i < vps->vps_num_layer_sets; i++)
  369. for (j = 0; j <= vps->vps_max_layer_id; j++)
  370. skip_bits(gb, 1); // layer_id_included_flag[i][j]
  371. vps->vps_timing_info_present_flag = get_bits1(gb);
  372. if (vps->vps_timing_info_present_flag) {
  373. vps->vps_num_units_in_tick = get_bits_long(gb, 32);
  374. vps->vps_time_scale = get_bits_long(gb, 32);
  375. vps->vps_poc_proportional_to_timing_flag = get_bits1(gb);
  376. if (vps->vps_poc_proportional_to_timing_flag)
  377. vps->vps_num_ticks_poc_diff_one = get_ue_golomb_long(gb) + 1;
  378. vps->vps_num_hrd_parameters = get_ue_golomb_long(gb);
  379. for (i = 0; i < vps->vps_num_hrd_parameters; i++) {
  380. int common_inf_present = 1;
  381. get_ue_golomb_long(gb); // hrd_layer_set_idx
  382. if (i)
  383. common_inf_present = get_bits1(gb);
  384. decode_hrd(gb, common_inf_present, vps->vps_max_sub_layers);
  385. }
  386. }
  387. get_bits1(gb); /* vps_extension_flag */
  388. if (ps->vps_list[vps_id] &&
  389. !memcmp(ps->vps_list[vps_id]->data, vps_buf->data, vps_buf->size)) {
  390. av_buffer_unref(&vps_buf);
  391. } else {
  392. remove_vps(ps, vps_id);
  393. ps->vps_list[vps_id] = vps_buf;
  394. }
  395. return 0;
  396. err:
  397. av_buffer_unref(&vps_buf);
  398. return AVERROR_INVALIDDATA;
  399. }
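/*
 * Minimal usage sketch for the NAL-level entry points (illustrative only;
 * it assumes the caller has already split the stream into NAL units and
 * removed the NAL header and emulation-prevention bytes, so that nal->data
 * and nal->size describe the raw RBSP payload):
 *
 *     GetBitContext gb;
 *     int ret = init_get_bits8(&gb, nal->data, nal->size);
 *     if (ret < 0)
 *         return ret;
 *     ret = ff_hevc_decode_nal_vps(&gb, avctx, ps);
 *     if (ret < 0)
 *         return ret;
 *
 * On success the parsed VPS lands in ps->vps_list[vps_id]; a bit-exact
 * repeat of an already stored VPS is discarded so the existing buffer (and
 * any references to it) stays valid.
 */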
  400. static void decode_vui(GetBitContext *gb, AVCodecContext *avctx,
  401. int apply_defdispwin, HEVCSPS *sps)
  402. {
  403. VUI *vui = &sps->vui;
  404. int sar_present;
  405. av_log(avctx, AV_LOG_DEBUG, "Decoding VUI\n");
  406. sar_present = get_bits1(gb);
  407. if (sar_present) {
  408. uint8_t sar_idx = get_bits(gb, 8);
  409. if (sar_idx < FF_ARRAY_ELEMS(vui_sar))
  410. vui->sar = vui_sar[sar_idx];
  411. else if (sar_idx == 255) {
  412. vui->sar.num = get_bits(gb, 16);
  413. vui->sar.den = get_bits(gb, 16);
  414. } else
  415. av_log(avctx, AV_LOG_WARNING,
  416. "Unknown SAR index: %u.\n", sar_idx);
  417. }
  418. vui->overscan_info_present_flag = get_bits1(gb);
  419. if (vui->overscan_info_present_flag)
  420. vui->overscan_appropriate_flag = get_bits1(gb);
  421. vui->video_signal_type_present_flag = get_bits1(gb);
  422. if (vui->video_signal_type_present_flag) {
  423. vui->video_format = get_bits(gb, 3);
  424. vui->video_full_range_flag = get_bits1(gb);
  425. vui->colour_description_present_flag = get_bits1(gb);
  426. if (vui->video_full_range_flag && sps->pix_fmt == AV_PIX_FMT_YUV420P)
  427. sps->pix_fmt = AV_PIX_FMT_YUVJ420P;
  428. if (vui->colour_description_present_flag) {
  429. vui->colour_primaries = get_bits(gb, 8);
  430. vui->transfer_characteristic = get_bits(gb, 8);
  431. vui->matrix_coeffs = get_bits(gb, 8);
  432. // Set invalid values to "unspecified"
  433. if (!av_color_primaries_name(vui->colour_primaries))
  434. vui->colour_primaries = AVCOL_PRI_UNSPECIFIED;
  435. if (!av_color_transfer_name(vui->transfer_characteristic))
  436. vui->transfer_characteristic = AVCOL_TRC_UNSPECIFIED;
  437. if (!av_color_space_name(vui->matrix_coeffs))
  438. vui->matrix_coeffs = AVCOL_SPC_UNSPECIFIED;
  439. }
  440. }
  441. vui->chroma_loc_info_present_flag = get_bits1(gb);
  442. if (vui->chroma_loc_info_present_flag) {
  443. vui->chroma_sample_loc_type_top_field = get_ue_golomb_long(gb);
  444. vui->chroma_sample_loc_type_bottom_field = get_ue_golomb_long(gb);
  445. }
  446. vui->neutra_chroma_indication_flag = get_bits1(gb);
  447. vui->field_seq_flag = get_bits1(gb);
  448. vui->frame_field_info_present_flag = get_bits1(gb);
  449. vui->default_display_window_flag = get_bits1(gb);
  450. if (vui->default_display_window_flag) {
  451. //TODO: * 2 is only valid for 420
  452. vui->def_disp_win.left_offset = get_ue_golomb_long(gb) * 2;
  453. vui->def_disp_win.right_offset = get_ue_golomb_long(gb) * 2;
  454. vui->def_disp_win.top_offset = get_ue_golomb_long(gb) * 2;
  455. vui->def_disp_win.bottom_offset = get_ue_golomb_long(gb) * 2;
  456. if (apply_defdispwin &&
  457. avctx->flags2 & AV_CODEC_FLAG2_IGNORE_CROP) {
  458. av_log(avctx, AV_LOG_DEBUG,
  459. "discarding vui default display window, "
  460. "original values are l:%u r:%u t:%u b:%u\n",
  461. vui->def_disp_win.left_offset,
  462. vui->def_disp_win.right_offset,
  463. vui->def_disp_win.top_offset,
  464. vui->def_disp_win.bottom_offset);
  465. vui->def_disp_win.left_offset =
  466. vui->def_disp_win.right_offset =
  467. vui->def_disp_win.top_offset =
  468. vui->def_disp_win.bottom_offset = 0;
  469. }
  470. }
  471. vui->vui_timing_info_present_flag = get_bits1(gb);
  472. if (vui->vui_timing_info_present_flag) {
  473. vui->vui_num_units_in_tick = get_bits_long(gb, 32);
  474. vui->vui_time_scale = get_bits_long(gb, 32);
  475. vui->vui_poc_proportional_to_timing_flag = get_bits1(gb);
  476. if (vui->vui_poc_proportional_to_timing_flag)
  477. vui->vui_num_ticks_poc_diff_one_minus1 = get_ue_golomb_long(gb);
  478. vui->vui_hrd_parameters_present_flag = get_bits1(gb);
  479. if (vui->vui_hrd_parameters_present_flag)
  480. decode_hrd(gb, 1, sps->max_sub_layers);
  481. }
  482. vui->bitstream_restriction_flag = get_bits1(gb);
  483. if (vui->bitstream_restriction_flag) {
  484. vui->tiles_fixed_structure_flag = get_bits1(gb);
  485. vui->motion_vectors_over_pic_boundaries_flag = get_bits1(gb);
  486. vui->restricted_ref_pic_lists_flag = get_bits1(gb);
  487. vui->min_spatial_segmentation_idc = get_ue_golomb_long(gb);
  488. vui->max_bytes_per_pic_denom = get_ue_golomb_long(gb);
  489. vui->max_bits_per_min_cu_denom = get_ue_golomb_long(gb);
  490. vui->log2_max_mv_length_horizontal = get_ue_golomb_long(gb);
  491. vui->log2_max_mv_length_vertical = get_ue_golomb_long(gb);
  492. }
  493. }
  494. static void set_default_scaling_list_data(ScalingList *sl)
  495. {
  496. int matrixId;
  497. for (matrixId = 0; matrixId < 6; matrixId++) {
  498. // 4x4 default is 16
  499. memset(sl->sl[0][matrixId], 16, 16);
  500. sl->sl_dc[0][matrixId] = 16; // default for 16x16
  501. sl->sl_dc[1][matrixId] = 16; // default for 32x32
  502. }
  503. memcpy(sl->sl[1][0], default_scaling_list_intra, 64);
  504. memcpy(sl->sl[1][1], default_scaling_list_intra, 64);
  505. memcpy(sl->sl[1][2], default_scaling_list_intra, 64);
  506. memcpy(sl->sl[1][3], default_scaling_list_inter, 64);
  507. memcpy(sl->sl[1][4], default_scaling_list_inter, 64);
  508. memcpy(sl->sl[1][5], default_scaling_list_inter, 64);
  509. memcpy(sl->sl[2][0], default_scaling_list_intra, 64);
  510. memcpy(sl->sl[2][1], default_scaling_list_intra, 64);
  511. memcpy(sl->sl[2][2], default_scaling_list_intra, 64);
  512. memcpy(sl->sl[2][3], default_scaling_list_inter, 64);
  513. memcpy(sl->sl[2][4], default_scaling_list_inter, 64);
  514. memcpy(sl->sl[2][5], default_scaling_list_inter, 64);
  515. memcpy(sl->sl[3][0], default_scaling_list_intra, 64);
  516. memcpy(sl->sl[3][1], default_scaling_list_inter, 64);
  517. }
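/* Size index 0 is 4x4 (a flat 16), 1 is 8x8, 2 is 16x16, 3 is 32x32. For
 * sizes 1 and 2 the first three matrices (intra Y/Cb/Cr) get the intra
 * default list and the last three (inter Y/Cb/Cr) the inter one; 32x32 has
 * only two matrices, one intra and one inter. The 16x16 and 32x32 DC values
 * default to 16. */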
  518. static int scaling_list_data(GetBitContext *gb, AVCodecContext *avctx, ScalingList *sl)
  519. {
  520. uint8_t scaling_list_pred_mode_flag[4][6];
  521. int32_t scaling_list_dc_coef[2][6];
  522. int size_id, matrix_id, i, pos;
  523. for (size_id = 0; size_id < 4; size_id++)
  524. for (matrix_id = 0; matrix_id < (size_id == 3 ? 2 : 6); matrix_id++) {
  525. scaling_list_pred_mode_flag[size_id][matrix_id] = get_bits1(gb);
  526. if (!scaling_list_pred_mode_flag[size_id][matrix_id]) {
  527. unsigned int delta = get_ue_golomb_long(gb);
  528. /* Only need to handle non-zero delta. Zero means default,
  529. * which should already be in the arrays. */
  530. if (delta) {
  531. // Copy from previous array.
  532. if (matrix_id < delta) {
  533. av_log(avctx, AV_LOG_ERROR,
  534. "Invalid delta in scaling list data: %d.\n", delta);
  535. return AVERROR_INVALIDDATA;
  536. }
  537. memcpy(sl->sl[size_id][matrix_id],
  538. sl->sl[size_id][matrix_id - delta],
  539. size_id > 0 ? 64 : 16);
  540. if (size_id > 1)
  541. sl->sl_dc[size_id - 2][matrix_id] = sl->sl_dc[size_id - 2][matrix_id - delta];
  542. }
  543. } else {
  544. int next_coef, coef_num;
  545. int32_t scaling_list_delta_coef;
  546. next_coef = 8;
  547. coef_num = FFMIN(64, 1 << (4 + (size_id << 1)));
  548. if (size_id > 1) {
  549. scaling_list_dc_coef[size_id - 2][matrix_id] = get_se_golomb(gb) + 8;
  550. next_coef = scaling_list_dc_coef[size_id - 2][matrix_id];
  551. sl->sl_dc[size_id - 2][matrix_id] = next_coef;
  552. }
  553. for (i = 0; i < coef_num; i++) {
  554. if (size_id == 0)
  555. pos = 4 * ff_hevc_diag_scan4x4_y[i] +
  556. ff_hevc_diag_scan4x4_x[i];
  557. else
  558. pos = 8 * ff_hevc_diag_scan8x8_y[i] +
  559. ff_hevc_diag_scan8x8_x[i];
  560. scaling_list_delta_coef = get_se_golomb(gb);
  561. next_coef = (next_coef + scaling_list_delta_coef + 256) % 256;
  562. sl->sl[size_id][matrix_id][pos] = next_coef;
  563. }
  564. }
  565. }
  566. return 0;
  567. }
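/* scaling_list_data() follows the scaling_list_data() syntax of the spec:
 * each (size_id, matrix_id) list is either copied from the list at
 * matrix_id - delta (a delta of 0 keeps the defaults installed by
 * set_default_scaling_list_data()), or coded explicitly as DPCM deltas along
 * the 4x4/8x8 diagonal scan, with a separately coded DC value for the 16x16
 * and 32x32 sizes. */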
  568. static int map_pixel_format(AVCodecContext *avctx, HEVCSPS *sps)
  569. {
  570. const AVPixFmtDescriptor *desc;
  571. if (sps->chroma_format_idc == 1) {
  572. switch (sps->bit_depth) {
  573. case 8: sps->pix_fmt = AV_PIX_FMT_YUV420P; break;
  574. case 9: sps->pix_fmt = AV_PIX_FMT_YUV420P9; break;
  575. case 10: sps->pix_fmt = AV_PIX_FMT_YUV420P10; break;
  576. default:
  577. avpriv_report_missing_feature(avctx, "Bit depth %d",
  578. sps->bit_depth);
  579. return AVERROR_PATCHWELCOME;
  580. }
  581. } else {
  582. avpriv_report_missing_feature(avctx, "Non-4:2:0 support");
  583. return AVERROR_PATCHWELCOME;
  584. }
  585. desc = av_pix_fmt_desc_get(sps->pix_fmt);
  586. if (!desc)
  587. return AVERROR(EINVAL);
  588. sps->hshift[0] = sps->vshift[0] = 0;
  589. sps->hshift[2] = sps->hshift[1] = desc->log2_chroma_w;
  590. sps->vshift[2] = sps->vshift[1] = desc->log2_chroma_h;
  591. sps->pixel_shift = sps->bit_depth > 8;
  592. return 0;
  593. }
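/* Only 4:2:0 chroma at 8, 9 or 10 bits per sample is mapped to a pixel
 * format here; anything else is reported as a missing feature. pixel_shift
 * records whether a sample occupies one byte or two. */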
  594. int ff_hevc_parse_sps(HEVCSPS *sps, GetBitContext *gb, unsigned int *sps_id,
  595. int apply_defdispwin, AVBufferRef **vps_list, AVCodecContext *avctx)
  596. {
  597. HEVCWindow *ow;
  598. int ret = 0;
  599. int log2_diff_max_min_transform_block_size;
  600. int bit_depth_chroma, start, vui_present, sublayer_ordering_info;
  601. int i;
  602. // Coded parameters
  603. sps->vps_id = get_bits(gb, 4);
  604. if (sps->vps_id >= HEVC_MAX_VPS_COUNT) {
  605. av_log(avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", sps->vps_id);
  606. ret = AVERROR_INVALIDDATA;
  607. goto err;
  608. }
  609. if (vps_list && !vps_list[sps->vps_id]) {
  610. av_log(avctx, AV_LOG_ERROR, "VPS %d does not exist\n",
  611. sps->vps_id);
  612. ret = AVERROR_INVALIDDATA;
  613. goto err;
  614. }
  615. sps->max_sub_layers = get_bits(gb, 3) + 1;
  616. if (sps->max_sub_layers > HEVC_MAX_SUB_LAYERS) {
  617. av_log(avctx, AV_LOG_ERROR, "sps_max_sub_layers out of range: %d\n",
  618. sps->max_sub_layers);
  619. ret = AVERROR_INVALIDDATA;
  620. goto err;
  621. }
  622. skip_bits1(gb); // temporal_id_nesting_flag
  623. parse_ptl(gb, avctx, &sps->ptl, sps->max_sub_layers);
  624. *sps_id = get_ue_golomb_long(gb);
  625. if (*sps_id >= HEVC_MAX_SPS_COUNT) {
  626. av_log(avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", *sps_id);
  627. ret = AVERROR_INVALIDDATA;
  628. goto err;
  629. }
  630. sps->chroma_format_idc = get_ue_golomb_long(gb);
  631. if (sps->chroma_format_idc != 1) {
  632. avpriv_report_missing_feature(avctx, "chroma_format_idc %d",
  633. sps->chroma_format_idc);
  634. ret = AVERROR_PATCHWELCOME;
  635. goto err;
  636. }
  637. if (sps->chroma_format_idc == 3)
  638. sps->separate_colour_plane_flag = get_bits1(gb);
  639. sps->width = get_ue_golomb_long(gb);
  640. sps->height = get_ue_golomb_long(gb);
  641. if ((ret = av_image_check_size(sps->width,
  642. sps->height, 0, avctx)) < 0)
  643. goto err;
  644. if (get_bits1(gb)) { // pic_conformance_flag
  645. //TODO: * 2 is only valid for 420
  646. sps->pic_conf_win.left_offset = get_ue_golomb_long(gb) * 2;
  647. sps->pic_conf_win.right_offset = get_ue_golomb_long(gb) * 2;
  648. sps->pic_conf_win.top_offset = get_ue_golomb_long(gb) * 2;
  649. sps->pic_conf_win.bottom_offset = get_ue_golomb_long(gb) * 2;
  650. if (avctx->flags2 & AV_CODEC_FLAG2_IGNORE_CROP) {
  651. av_log(avctx, AV_LOG_DEBUG,
  652. "discarding sps conformance window, "
  653. "original values are l:%u r:%u t:%u b:%u\n",
  654. sps->pic_conf_win.left_offset,
  655. sps->pic_conf_win.right_offset,
  656. sps->pic_conf_win.top_offset,
  657. sps->pic_conf_win.bottom_offset);
  658. sps->pic_conf_win.left_offset =
  659. sps->pic_conf_win.right_offset =
  660. sps->pic_conf_win.top_offset =
  661. sps->pic_conf_win.bottom_offset = 0;
  662. }
  663. sps->output_window = sps->pic_conf_win;
  664. }
  665. sps->bit_depth = get_ue_golomb_long(gb) + 8;
  666. bit_depth_chroma = get_ue_golomb_long(gb) + 8;
  667. if (bit_depth_chroma != sps->bit_depth) {
  668. av_log(avctx, AV_LOG_ERROR,
  669. "Luma bit depth (%d) is different from chroma bit depth (%d), "
  670. "this is unsupported.\n",
  671. sps->bit_depth, bit_depth_chroma);
  672. ret = AVERROR_INVALIDDATA;
  673. goto err;
  674. }
  675. ret = map_pixel_format(avctx, sps);
  676. if (ret < 0)
  677. goto err;
  678. sps->log2_max_poc_lsb = get_ue_golomb_long(gb) + 4;
  679. if (sps->log2_max_poc_lsb > 16) {
  680. av_log(avctx, AV_LOG_ERROR, "log2_max_pic_order_cnt_lsb_minus4 out of range: %d\n",
  681. sps->log2_max_poc_lsb - 4);
  682. ret = AVERROR_INVALIDDATA;
  683. goto err;
  684. }
  685. sublayer_ordering_info = get_bits1(gb);
  686. start = sublayer_ordering_info ? 0 : sps->max_sub_layers - 1;
  687. for (i = start; i < sps->max_sub_layers; i++) {
  688. sps->temporal_layer[i].max_dec_pic_buffering = get_ue_golomb_long(gb) + 1;
  689. sps->temporal_layer[i].num_reorder_pics = get_ue_golomb_long(gb);
  690. sps->temporal_layer[i].max_latency_increase = get_ue_golomb_long(gb) - 1;
  691. if (sps->temporal_layer[i].max_dec_pic_buffering > HEVC_MAX_DPB_SIZE) {
  692. av_log(avctx, AV_LOG_ERROR, "sps_max_dec_pic_buffering_minus1 out of range: %d\n",
  693. sps->temporal_layer[i].max_dec_pic_buffering - 1);
  694. ret = AVERROR_INVALIDDATA;
  695. goto err;
  696. }
  697. if (sps->temporal_layer[i].num_reorder_pics > sps->temporal_layer[i].max_dec_pic_buffering - 1) {
  698. av_log(avctx, AV_LOG_WARNING, "sps_max_num_reorder_pics out of range: %d\n",
  699. sps->temporal_layer[i].num_reorder_pics);
  700. if (avctx->err_recognition & AV_EF_EXPLODE ||
  701. sps->temporal_layer[i].num_reorder_pics > HEVC_MAX_DPB_SIZE - 1) {
  702. ret = AVERROR_INVALIDDATA;
  703. goto err;
  704. }
  705. sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[i].num_reorder_pics + 1;
  706. }
  707. }
  708. if (!sublayer_ordering_info) {
  709. for (i = 0; i < start; i++) {
  710. sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[start].max_dec_pic_buffering;
  711. sps->temporal_layer[i].num_reorder_pics = sps->temporal_layer[start].num_reorder_pics;
  712. sps->temporal_layer[i].max_latency_increase = sps->temporal_layer[start].max_latency_increase;
  713. }
  714. }
  715. sps->log2_min_cb_size = get_ue_golomb_long(gb) + 3;
  716. sps->log2_diff_max_min_coding_block_size = get_ue_golomb_long(gb);
  717. sps->log2_min_tb_size = get_ue_golomb_long(gb) + 2;
  718. log2_diff_max_min_transform_block_size = get_ue_golomb_long(gb);
  719. sps->log2_max_trafo_size = log2_diff_max_min_transform_block_size +
  720. sps->log2_min_tb_size;
  721. if (sps->log2_min_tb_size >= sps->log2_min_cb_size) {
  722. av_log(avctx, AV_LOG_ERROR, "Invalid value for log2_min_tb_size\n");
  723. ret = AVERROR_INVALIDDATA;
  724. goto err;
  725. }
  726. sps->max_transform_hierarchy_depth_inter = get_ue_golomb_long(gb);
  727. sps->max_transform_hierarchy_depth_intra = get_ue_golomb_long(gb);
  728. sps->scaling_list_enable_flag = get_bits1(gb);
  729. if (sps->scaling_list_enable_flag) {
  730. set_default_scaling_list_data(&sps->scaling_list);
  731. if (get_bits1(gb)) {
  732. ret = scaling_list_data(gb, avctx, &sps->scaling_list);
  733. if (ret < 0)
  734. goto err;
  735. }
  736. }
  737. sps->amp_enabled_flag = get_bits1(gb);
  738. sps->sao_enabled = get_bits1(gb);
  739. sps->pcm_enabled_flag = get_bits1(gb);
  740. if (sps->pcm_enabled_flag) {
  741. sps->pcm.bit_depth = get_bits(gb, 4) + 1;
  742. sps->pcm.bit_depth_chroma = get_bits(gb, 4) + 1;
  743. sps->pcm.log2_min_pcm_cb_size = get_ue_golomb_long(gb) + 3;
  744. sps->pcm.log2_max_pcm_cb_size = sps->pcm.log2_min_pcm_cb_size +
  745. get_ue_golomb_long(gb);
  746. if (sps->pcm.bit_depth > sps->bit_depth) {
  747. av_log(avctx, AV_LOG_ERROR,
  748. "PCM bit depth (%d) is greater than normal bit depth (%d)\n",
  749. sps->pcm.bit_depth, sps->bit_depth);
  750. ret = AVERROR_INVALIDDATA;
  751. goto err;
  752. }
  753. sps->pcm.loop_filter_disable_flag = get_bits1(gb);
  754. }
  755. sps->nb_st_rps = get_ue_golomb_long(gb);
  756. if (sps->nb_st_rps > HEVC_MAX_SHORT_TERM_RPS_COUNT) {
  757. av_log(avctx, AV_LOG_ERROR, "Too many short term RPS: %d.\n",
  758. sps->nb_st_rps);
  759. ret = AVERROR_INVALIDDATA;
  760. goto err;
  761. }
  762. for (i = 0; i < sps->nb_st_rps; i++) {
  763. if ((ret = ff_hevc_decode_short_term_rps(gb, avctx, &sps->st_rps[i],
  764. sps, 0)) < 0)
  765. goto err;
  766. }
  767. sps->long_term_ref_pics_present_flag = get_bits1(gb);
  768. if (sps->long_term_ref_pics_present_flag) {
  769. sps->num_long_term_ref_pics_sps = get_ue_golomb_long(gb);
  770. for (i = 0; i < sps->num_long_term_ref_pics_sps; i++) {
  771. sps->lt_ref_pic_poc_lsb_sps[i] = get_bits(gb, sps->log2_max_poc_lsb);
  772. sps->used_by_curr_pic_lt_sps_flag[i] = get_bits1(gb);
  773. }
  774. }
  775. sps->sps_temporal_mvp_enabled_flag = get_bits1(gb);
  776. sps->sps_strong_intra_smoothing_enable_flag = get_bits1(gb);
  777. sps->vui.sar = (AVRational){0, 1};
  778. vui_present = get_bits1(gb);
  779. if (vui_present)
  780. decode_vui(gb, avctx, apply_defdispwin, sps);
  781. skip_bits1(gb); // sps_extension_flag
  782. if (apply_defdispwin) {
  783. sps->output_window.left_offset += sps->vui.def_disp_win.left_offset;
  784. sps->output_window.right_offset += sps->vui.def_disp_win.right_offset;
  785. sps->output_window.top_offset += sps->vui.def_disp_win.top_offset;
  786. sps->output_window.bottom_offset += sps->vui.def_disp_win.bottom_offset;
  787. }
  788. ow = &sps->output_window;
  789. if (ow->left_offset >= INT_MAX - ow->right_offset ||
  790. ow->top_offset >= INT_MAX - ow->bottom_offset ||
  791. ow->left_offset + ow->right_offset >= sps->width ||
  792. ow->top_offset + ow->bottom_offset >= sps->height) {
  793. av_log(avctx, AV_LOG_WARNING, "Invalid cropping offsets: %u/%u/%u/%u\n",
  794. ow->left_offset, ow->right_offset, ow->top_offset, ow->bottom_offset);
  795. if (avctx->err_recognition & AV_EF_EXPLODE) {
  796. ret = AVERROR_INVALIDDATA;
  797. goto err;
  798. }
  799. av_log(avctx, AV_LOG_WARNING,
  800. "Displaying the whole video surface.\n");
  801. memset(ow, 0, sizeof(*ow));
  802. }
  803. // Inferred parameters
  804. sps->log2_ctb_size = sps->log2_min_cb_size +
  805. sps->log2_diff_max_min_coding_block_size;
  806. sps->log2_min_pu_size = sps->log2_min_cb_size - 1;
  807. sps->ctb_width = (sps->width + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
  808. sps->ctb_height = (sps->height + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
  809. sps->ctb_size = sps->ctb_width * sps->ctb_height;
  810. sps->min_cb_width = sps->width >> sps->log2_min_cb_size;
  811. sps->min_cb_height = sps->height >> sps->log2_min_cb_size;
  812. sps->min_tb_width = sps->width >> sps->log2_min_tb_size;
  813. sps->min_tb_height = sps->height >> sps->log2_min_tb_size;
  814. sps->min_pu_width = sps->width >> sps->log2_min_pu_size;
  815. sps->min_pu_height = sps->height >> sps->log2_min_pu_size;
  816. sps->qp_bd_offset = 6 * (sps->bit_depth - 8);
  817. if (sps->width & ((1 << sps->log2_min_cb_size) - 1) ||
  818. sps->height & ((1 << sps->log2_min_cb_size) - 1)) {
  819. av_log(avctx, AV_LOG_ERROR, "Invalid coded frame dimensions.\n");
  820. goto err;
  821. }
  822. if (sps->log2_ctb_size > HEVC_MAX_LOG2_CTB_SIZE) {
  823. av_log(avctx, AV_LOG_ERROR, "CTB size out of range: 2^%d\n", sps->log2_ctb_size);
  824. goto err;
  825. }
  826. if (sps->max_transform_hierarchy_depth_inter > sps->log2_ctb_size - sps->log2_min_tb_size) {
  827. av_log(avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_inter out of range: %d\n",
  828. sps->max_transform_hierarchy_depth_inter);
  829. goto err;
  830. }
  831. if (sps->max_transform_hierarchy_depth_intra > sps->log2_ctb_size - sps->log2_min_tb_size) {
  832. av_log(avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_intra out of range: %d\n",
  833. sps->max_transform_hierarchy_depth_intra);
  834. goto err;
  835. }
  836. if (sps->log2_max_trafo_size > FFMIN(sps->log2_ctb_size, 5)) {
  837. av_log(avctx, AV_LOG_ERROR,
  838. "max transform block size out of range: %d\n",
  839. sps->log2_max_trafo_size);
  840. goto err;
  841. }
  842. return 0;
  843. err:
  844. return ret < 0 ? ret : AVERROR_INVALIDDATA;
  845. }
  846. int ff_hevc_decode_nal_sps(GetBitContext *gb, AVCodecContext *avctx,
  847. HEVCParamSets *ps, int apply_defdispwin)
  848. {
  849. HEVCSPS *sps;
  850. AVBufferRef *sps_buf = av_buffer_allocz(sizeof(*sps));
  851. unsigned int sps_id;
  852. int ret;
  853. if (!sps_buf)
  854. return AVERROR(ENOMEM);
  855. sps = (HEVCSPS*)sps_buf->data;
  856. av_log(avctx, AV_LOG_DEBUG, "Decoding SPS\n");
  857. ret = ff_hevc_parse_sps(sps, gb, &sps_id,
  858. apply_defdispwin,
  859. ps->vps_list, avctx);
  860. if (ret < 0) {
  861. av_buffer_unref(&sps_buf);
  862. return ret;
  863. }
  864. if (avctx->debug & FF_DEBUG_BITSTREAM) {
  865. av_log(avctx, AV_LOG_DEBUG,
  866. "Parsed SPS: id %d; coded wxh: %dx%d; "
  867. "cropped wxh: %dx%d; pix_fmt: %s.\n",
  868. sps_id, sps->width, sps->height,
  869. sps->width - (sps->output_window.left_offset + sps->output_window.right_offset),
  870. sps->height - (sps->output_window.top_offset + sps->output_window.bottom_offset),
  871. av_get_pix_fmt_name(sps->pix_fmt));
  872. }
  873. /* If this is a repeat of an already parsed SPS, keep the original
  874. * buffer; otherwise drop all PPSes that depend on the old SPS and
  875. * store the new one. */
  876. if (ps->sps_list[sps_id] &&
  877. !memcmp(ps->sps_list[sps_id]->data, sps_buf->data, sps_buf->size)) {
  878. av_buffer_unref(&sps_buf);
  879. } else {
  880. remove_sps(ps, sps_id);
  881. ps->sps_list[sps_id] = sps_buf;
  882. }
  883. return 0;
  884. }
  885. static void hevc_pps_free(void *opaque, uint8_t *data)
  886. {
  887. HEVCPPS *pps = (HEVCPPS*)data;
  888. av_freep(&pps->column_width);
  889. av_freep(&pps->row_height);
  890. av_freep(&pps->col_bd);
  891. av_freep(&pps->row_bd);
  892. av_freep(&pps->col_idxX);
  893. av_freep(&pps->ctb_addr_rs_to_ts);
  894. av_freep(&pps->ctb_addr_ts_to_rs);
  895. av_freep(&pps->tile_pos_rs);
  896. av_freep(&pps->tile_id);
  897. av_freep(&pps->min_tb_addr_zs);
  898. av_freep(&pps);
  899. }
  900. static inline int setup_pps(AVCodecContext *avctx, GetBitContext *gb,
  901. HEVCPPS *pps, HEVCSPS *sps)
  902. {
  903. int log2_diff;
  904. int pic_area_in_ctbs, pic_area_in_min_tbs;
  905. int i, j, x, y, ctb_addr_rs, tile_id;
  906. // Inferred parameters
  907. pps->col_bd = av_malloc_array(pps->num_tile_columns + 1, sizeof(*pps->col_bd));
  908. pps->row_bd = av_malloc_array(pps->num_tile_rows + 1, sizeof(*pps->row_bd));
  909. pps->col_idxX = av_malloc_array(sps->ctb_width, sizeof(*pps->col_idxX));
  910. if (!pps->col_bd || !pps->row_bd || !pps->col_idxX)
  911. return AVERROR(ENOMEM);
  912. if (pps->uniform_spacing_flag) {
  913. if (!pps->column_width) {
  914. pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
  915. pps->row_height = av_malloc_array(pps->num_tile_rows, sizeof(*pps->row_height));
  916. }
  917. if (!pps->column_width || !pps->row_height)
  918. return AVERROR(ENOMEM);
  919. for (i = 0; i < pps->num_tile_columns; i++) {
  920. pps->column_width[i] = ((i + 1) * sps->ctb_width) / pps->num_tile_columns -
  921. (i * sps->ctb_width) / pps->num_tile_columns;
  922. }
  923. for (i = 0; i < pps->num_tile_rows; i++) {
  924. pps->row_height[i] = ((i + 1) * sps->ctb_height) / pps->num_tile_rows -
  925. (i * sps->ctb_height) / pps->num_tile_rows;
  926. }
  927. }
  928. pps->col_bd[0] = 0;
  929. for (i = 0; i < pps->num_tile_columns; i++)
  930. pps->col_bd[i + 1] = pps->col_bd[i] + pps->column_width[i];
  931. pps->row_bd[0] = 0;
  932. for (i = 0; i < pps->num_tile_rows; i++)
  933. pps->row_bd[i + 1] = pps->row_bd[i] + pps->row_height[i];
  934. for (i = 0, j = 0; i < sps->ctb_width; i++) {
  935. if (i > pps->col_bd[j])
  936. j++;
  937. pps->col_idxX[i] = j;
  938. }
  939. /**
  940. * Scanning tables (H.265 section 6.5: raster/tile scan conversion, z-scan order)
  941. */
  942. pic_area_in_ctbs = sps->ctb_width * sps->ctb_height;
  943. pic_area_in_min_tbs = sps->min_tb_width * sps->min_tb_height;
  944. pps->ctb_addr_rs_to_ts = av_malloc_array(pic_area_in_ctbs, sizeof(*pps->ctb_addr_rs_to_ts));
  945. pps->ctb_addr_ts_to_rs = av_malloc_array(pic_area_in_ctbs, sizeof(*pps->ctb_addr_ts_to_rs));
  946. pps->tile_id = av_malloc_array(pic_area_in_ctbs, sizeof(*pps->tile_id));
  947. pps->min_tb_addr_zs = av_malloc_array(pic_area_in_min_tbs, sizeof(*pps->min_tb_addr_zs));
  948. if (!pps->ctb_addr_rs_to_ts || !pps->ctb_addr_ts_to_rs ||
  949. !pps->tile_id || !pps->min_tb_addr_zs) {
  950. return AVERROR(ENOMEM);
  951. }
  952. for (ctb_addr_rs = 0; ctb_addr_rs < pic_area_in_ctbs; ctb_addr_rs++) {
  953. int tb_x = ctb_addr_rs % sps->ctb_width;
  954. int tb_y = ctb_addr_rs / sps->ctb_width;
  955. int tile_x = 0;
  956. int tile_y = 0;
  957. int val = 0;
  958. for (i = 0; i < pps->num_tile_columns; i++) {
  959. if (tb_x < pps->col_bd[i + 1]) {
  960. tile_x = i;
  961. break;
  962. }
  963. }
  964. for (i = 0; i < pps->num_tile_rows; i++) {
  965. if (tb_y < pps->row_bd[i + 1]) {
  966. tile_y = i;
  967. break;
  968. }
  969. }
  970. for (i = 0; i < tile_x; i++)
  971. val += pps->row_height[tile_y] * pps->column_width[i];
  972. for (i = 0; i < tile_y; i++)
  973. val += sps->ctb_width * pps->row_height[i];
  974. val += (tb_y - pps->row_bd[tile_y]) * pps->column_width[tile_x] +
  975. tb_x - pps->col_bd[tile_x];
  976. pps->ctb_addr_rs_to_ts[ctb_addr_rs] = val;
  977. pps->ctb_addr_ts_to_rs[val] = ctb_addr_rs;
  978. }
  979. for (j = 0, tile_id = 0; j < pps->num_tile_rows; j++)
  980. for (i = 0; i < pps->num_tile_columns; i++, tile_id++)
  981. for (y = pps->row_bd[j]; y < pps->row_bd[j + 1]; y++)
  982. for (x = pps->col_bd[i]; x < pps->col_bd[i + 1]; x++)
  983. pps->tile_id[pps->ctb_addr_rs_to_ts[y * sps->ctb_width + x]] = tile_id;
  984. pps->tile_pos_rs = av_malloc_array(tile_id, sizeof(*pps->tile_pos_rs));
  985. if (!pps->tile_pos_rs)
  986. return AVERROR(ENOMEM);
  987. for (j = 0; j < pps->num_tile_rows; j++)
  988. for (i = 0; i < pps->num_tile_columns; i++)
  989. pps->tile_pos_rs[j * pps->num_tile_columns + i] =
  990. pps->row_bd[j] * sps->ctb_width + pps->col_bd[i];
  991. log2_diff = sps->log2_ctb_size - sps->log2_min_tb_size;
  992. for (y = 0; y < sps->min_tb_height; y++) {
  993. for (x = 0; x < sps->min_tb_width; x++) {
  994. int tb_x = x >> log2_diff;
  995. int tb_y = y >> log2_diff;
  996. int rs = sps->ctb_width * tb_y + tb_x;
  997. int val = pps->ctb_addr_rs_to_ts[rs] << (log2_diff * 2);
  998. for (i = 0; i < log2_diff; i++) {
  999. int m = 1 << i;
  1000. val += (m & x ? m * m : 0) + (m & y ? 2 * m * m : 0);
  1001. }
  1002. pps->min_tb_addr_zs[y * sps->min_tb_width + x] = val;
  1003. }
  1004. }
  1005. return 0;
  1006. }
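/* setup_pps() derives the tile/CTB addressing tables used during decoding:
 * col_bd[]/row_bd[] hold tile boundaries in CTB units, col_idxX[] maps a CTB
 * column to its tile column, ctb_addr_rs_to_ts[]/ctb_addr_ts_to_rs[] convert
 * between raster scan and tile scan order, tile_id[] gives the tile index of
 * each CTB in tile-scan order, tile_pos_rs[] the raster address of each
 * tile's first CTB, and min_tb_addr_zs[] the z-scan address of every minimum
 * transform block. */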
  1007. int ff_hevc_decode_nal_pps(GetBitContext *gb, AVCodecContext *avctx,
  1008. HEVCParamSets *ps)
  1009. {
  1010. HEVCSPS *sps = NULL;
  1011. int i, ret = 0;
  1012. unsigned int pps_id = 0;
  1013. AVBufferRef *pps_buf;
  1014. HEVCPPS *pps = av_mallocz(sizeof(*pps));
  1015. if (!pps)
  1016. return AVERROR(ENOMEM);
  1017. pps_buf = av_buffer_create((uint8_t *)pps, sizeof(*pps),
  1018. hevc_pps_free, NULL, 0);
  1019. if (!pps_buf) {
  1020. av_freep(&pps);
  1021. return AVERROR(ENOMEM);
  1022. }
  1023. av_log(avctx, AV_LOG_DEBUG, "Decoding PPS\n");
  1024. // Default values
  1025. pps->loop_filter_across_tiles_enabled_flag = 1;
  1026. pps->num_tile_columns = 1;
  1027. pps->num_tile_rows = 1;
  1028. pps->uniform_spacing_flag = 1;
  1029. pps->disable_dbf = 0;
  1030. pps->beta_offset = 0;
  1031. pps->tc_offset = 0;
  1032. // Coded parameters
  1033. pps_id = get_ue_golomb_long(gb);
  1034. if (pps_id >= HEVC_MAX_PPS_COUNT) {
  1035. av_log(avctx, AV_LOG_ERROR, "PPS id out of range: %d\n", pps_id);
  1036. ret = AVERROR_INVALIDDATA;
  1037. goto err;
  1038. }
  1039. pps->sps_id = get_ue_golomb_long(gb);
  1040. if (pps->sps_id >= HEVC_MAX_SPS_COUNT) {
  1041. av_log(avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", pps->sps_id);
  1042. ret = AVERROR_INVALIDDATA;
  1043. goto err;
  1044. }
  1045. if (!ps->sps_list[pps->sps_id]) {
  1046. av_log(avctx, AV_LOG_ERROR, "SPS %u does not exist.\n", pps->sps_id);
  1047. ret = AVERROR_INVALIDDATA;
  1048. goto err;
  1049. }
  1050. sps = (HEVCSPS *)ps->sps_list[pps->sps_id]->data;
  1051. pps->dependent_slice_segments_enabled_flag = get_bits1(gb);
  1052. pps->output_flag_present_flag = get_bits1(gb);
  1053. pps->num_extra_slice_header_bits = get_bits(gb, 3);
  1054. pps->sign_data_hiding_flag = get_bits1(gb);
  1055. pps->cabac_init_present_flag = get_bits1(gb);
  1056. pps->num_ref_idx_l0_default_active = get_ue_golomb_long(gb) + 1;
  1057. pps->num_ref_idx_l1_default_active = get_ue_golomb_long(gb) + 1;
  1058. pps->pic_init_qp_minus26 = get_se_golomb(gb);
  1059. pps->constrained_intra_pred_flag = get_bits1(gb);
  1060. pps->transform_skip_enabled_flag = get_bits1(gb);
  1061. pps->cu_qp_delta_enabled_flag = get_bits1(gb);
  1062. pps->diff_cu_qp_delta_depth = 0;
  1063. if (pps->cu_qp_delta_enabled_flag)
  1064. pps->diff_cu_qp_delta_depth = get_ue_golomb_long(gb);
  1065. pps->cb_qp_offset = get_se_golomb(gb);
  1066. if (pps->cb_qp_offset < -12 || pps->cb_qp_offset > 12) {
  1067. av_log(avctx, AV_LOG_ERROR, "pps_cb_qp_offset out of range: %d\n",
  1068. pps->cb_qp_offset);
  1069. ret = AVERROR_INVALIDDATA;
  1070. goto err;
  1071. }
  1072. pps->cr_qp_offset = get_se_golomb(gb);
  1073. if (pps->cr_qp_offset < -12 || pps->cr_qp_offset > 12) {
  1074. av_log(avctx, AV_LOG_ERROR, "pps_cr_qp_offset out of range: %d\n",
  1075. pps->cr_qp_offset);
  1076. ret = AVERROR_INVALIDDATA;
  1077. goto err;
  1078. }
  1079. pps->pic_slice_level_chroma_qp_offsets_present_flag = get_bits1(gb);
  1080. pps->weighted_pred_flag = get_bits1(gb);
  1081. pps->weighted_bipred_flag = get_bits1(gb);
  1082. pps->transquant_bypass_enable_flag = get_bits1(gb);
  1083. pps->tiles_enabled_flag = get_bits1(gb);
  1084. pps->entropy_coding_sync_enabled_flag = get_bits1(gb);
  1085. if (pps->tiles_enabled_flag) {
  1086. pps->num_tile_columns = get_ue_golomb_long(gb) + 1;
  1087. pps->num_tile_rows = get_ue_golomb_long(gb) + 1;
  1088. if (pps->num_tile_columns == 0 ||
  1089. pps->num_tile_columns >= sps->width) {
  1090. av_log(avctx, AV_LOG_ERROR, "num_tile_columns_minus1 out of range: %d\n",
  1091. pps->num_tile_columns - 1);
  1092. ret = AVERROR_INVALIDDATA;
  1093. goto err;
  1094. }
  1095. if (pps->num_tile_rows == 0 ||
  1096. pps->num_tile_rows >= sps->height) {
  1097. av_log(avctx, AV_LOG_ERROR, "num_tile_rows_minus1 out of range: %d\n",
  1098. pps->num_tile_rows - 1);
  1099. ret = AVERROR_INVALIDDATA;
  1100. goto err;
  1101. }
  1102. pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
  1103. pps->row_height = av_malloc_array(pps->num_tile_rows, sizeof(*pps->row_height));
  1104. if (!pps->column_width || !pps->row_height) {
  1105. ret = AVERROR(ENOMEM);
  1106. goto err;
  1107. }
  1108. pps->uniform_spacing_flag = get_bits1(gb);
  1109. if (!pps->uniform_spacing_flag) {
  1110. uint64_t sum = 0;
  1111. for (i = 0; i < pps->num_tile_columns - 1; i++) {
  1112. pps->column_width[i] = get_ue_golomb_long(gb) + 1;
  1113. sum += pps->column_width[i];
  1114. }
  1115. if (sum >= sps->ctb_width) {
  1116. av_log(avctx, AV_LOG_ERROR, "Invalid tile widths.\n");
  1117. ret = AVERROR_INVALIDDATA;
  1118. goto err;
  1119. }
  1120. pps->column_width[pps->num_tile_columns - 1] = sps->ctb_width - sum;
  1121. sum = 0;
  1122. for (i = 0; i < pps->num_tile_rows - 1; i++) {
  1123. pps->row_height[i] = get_ue_golomb_long(gb) + 1;
  1124. sum += pps->row_height[i];
  1125. }
  1126. if (sum >= sps->ctb_height) {
  1127. av_log(avctx, AV_LOG_ERROR, "Invalid tile heights.\n");
  1128. ret = AVERROR_INVALIDDATA;
  1129. goto err;
  1130. }
  1131. pps->row_height[pps->num_tile_rows - 1] = sps->ctb_height - sum;
  1132. }
  1133. pps->loop_filter_across_tiles_enabled_flag = get_bits1(gb);
  1134. }
  1135. pps->seq_loop_filter_across_slices_enabled_flag = get_bits1(gb);
  1136. pps->deblocking_filter_control_present_flag = get_bits1(gb);
  1137. if (pps->deblocking_filter_control_present_flag) {
  1138. pps->deblocking_filter_override_enabled_flag = get_bits1(gb);
  1139. pps->disable_dbf = get_bits1(gb);
  1140. if (!pps->disable_dbf) {
  1141. pps->beta_offset = get_se_golomb(gb) * 2;
  1142. pps->tc_offset = get_se_golomb(gb) * 2;
  1143. if (pps->beta_offset/2 < -6 || pps->beta_offset/2 > 6) {
  1144. av_log(avctx, AV_LOG_ERROR, "pps_beta_offset_div2 out of range: %d\n",
  1145. pps->beta_offset/2);
  1146. ret = AVERROR_INVALIDDATA;
  1147. goto err;
  1148. }
  1149. if (pps->tc_offset/2 < -6 || pps->tc_offset/2 > 6) {
  1150. av_log(avctx, AV_LOG_ERROR, "pps_tc_offset_div2 out of range: %d\n",
  1151. pps->tc_offset/2);
  1152. ret = AVERROR_INVALIDDATA;
  1153. goto err;
  1154. }
  1155. }
  1156. }
  1157. pps->scaling_list_data_present_flag = get_bits1(gb);
  1158. if (pps->scaling_list_data_present_flag) {
  1159. set_default_scaling_list_data(&pps->scaling_list);
  1160. ret = scaling_list_data(gb, avctx, &pps->scaling_list);
  1161. if (ret < 0)
  1162. goto err;
  1163. }
  1164. pps->lists_modification_present_flag = get_bits1(gb);
  1165. pps->log2_parallel_merge_level = get_ue_golomb_long(gb) + 2;
  1166. if (pps->log2_parallel_merge_level > sps->log2_ctb_size) {
  1167. av_log(avctx, AV_LOG_ERROR, "log2_parallel_merge_level_minus2 out of range: %d\n",
  1168. pps->log2_parallel_merge_level - 2);
  1169. ret = AVERROR_INVALIDDATA;
  1170. goto err;
  1171. }
  1172. pps->slice_header_extension_present_flag = get_bits1(gb);
  1173. skip_bits1(gb); // pps_extension_flag
  1174. ret = setup_pps(avctx, gb, pps, sps);
  1175. if (ret < 0)
  1176. goto err;
  1177. remove_pps(ps, pps_id);
  1178. ps->pps_list[pps_id] = pps_buf;
  1179. return 0;
  1180. err:
  1181. av_buffer_unref(&pps_buf);
  1182. return ret;
  1183. }
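/*
 * Typical dispatch when feeding parameter-set NAL units to this file
 * (illustrative sketch; nal_type and the surrounding loop are assumed to
 * come from the caller's NAL splitting code):
 *
 *     switch (nal_type) {
 *     case HEVC_NAL_VPS:
 *         ret = ff_hevc_decode_nal_vps(&gb, avctx, ps);
 *         break;
 *     case HEVC_NAL_SPS:
 *         ret = ff_hevc_decode_nal_sps(&gb, avctx, ps, apply_defdispwin);
 *         break;
 *     case HEVC_NAL_PPS:
 *         ret = ff_hevc_decode_nal_pps(&gb, avctx, ps);
 *         break;
 *     }
 *
 * Ordering matters: an SPS is rejected if the VPS it references is missing
 * from vps_list[], and a PPS is rejected if its SPS is missing, so parameter
 * sets must be parsed in VPS -> SPS -> PPS order.
 */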