You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1632 lines
62KB

  1. /*
  2. * HEVC Parameter Set decoding
  3. *
  4. * Copyright (C) 2012 - 2013 Guillaume Martres
  5. * Copyright (C) 2012 - 2013 Mickael Raulet
  6. * Copyright (C) 2012 - 2013 Gildas Cocherel
  7. * Copyright (C) 2013 Vittorio Giovara
  8. *
  9. * This file is part of FFmpeg.
  10. *
  11. * FFmpeg is free software; you can redistribute it and/or
  12. * modify it under the terms of the GNU Lesser General Public
  13. * License as published by the Free Software Foundation; either
  14. * version 2.1 of the License, or (at your option) any later version.
  15. *
  16. * FFmpeg is distributed in the hope that it will be useful,
  17. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  19. * Lesser General Public License for more details.
  20. *
  21. * You should have received a copy of the GNU Lesser General Public
  22. * License along with FFmpeg; if not, write to the Free Software
  23. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  24. */
  25. #include "libavutil/imgutils.h"
  26. #include "golomb.h"
  27. #include "hevc.h"
/* Default 8x8 intra scaling matrix (HEVC spec Table 7-5); also used as the
 * base for the 16x16 and 32x32 intra defaults. */
static const uint8_t default_scaling_list_intra[] = {
    16, 16, 16, 16, 17, 18, 21, 24,
    16, 16, 16, 16, 17, 19, 22, 25,
    16, 16, 17, 18, 20, 22, 25, 29,
    16, 16, 18, 21, 24, 27, 31, 36,
    17, 17, 20, 24, 30, 35, 41, 47,
    18, 19, 22, 27, 35, 44, 54, 65,
    21, 22, 25, 31, 41, 54, 70, 88,
    24, 25, 29, 36, 47, 65, 88, 115
};
/* Default 8x8 inter scaling matrix (HEVC spec Table 7-6); also used as the
 * base for the 16x16 and 32x32 inter defaults. */
static const uint8_t default_scaling_list_inter[] = {
    16, 16, 16, 16, 17, 18, 20, 24,
    16, 16, 16, 17, 18, 20, 24, 25,
    16, 16, 17, 18, 20, 24, 25, 28,
    16, 17, 18, 20, 24, 25, 28, 33,
    17, 18, 20, 24, 25, 28, 33, 41,
    18, 20, 24, 25, 28, 33, 41, 54,
    20, 24, 25, 28, 33, 41, 54, 71,
    24, 25, 28, 33, 41, 54, 71, 91
};
/* Sample aspect ratios indexed by aspect_ratio_idc (HEVC spec Table E.1).
 * Index 0 means "unspecified"; idc 255 (EXTENDED_SAR) is read explicitly
 * from the bitstream instead of this table. */
static const AVRational vui_sar[] = {
    {   0,  1 },
    {   1,  1 },
    {  12, 11 },
    {  10, 11 },
    {  16, 11 },
    {  40, 33 },
    {  24, 11 },
    {  20, 11 },
    {  32, 11 },
    {  80, 33 },
    {  18, 11 },
    {  15, 11 },
    {  64, 33 },
    { 160, 99 },
    {   4,  3 },
    {   3,  2 },
    {   2,  1 },
};
  67. static void remove_pps(HEVCParamSets *s, int id)
  68. {
  69. if (s->pps_list[id] && s->pps == (const HEVCPPS*)s->pps_list[id]->data)
  70. s->pps = NULL;
  71. av_buffer_unref(&s->pps_list[id]);
  72. }
  73. static void remove_sps(HEVCParamSets *s, int id)
  74. {
  75. int i;
  76. if (s->sps_list[id]) {
  77. if (s->sps == (const HEVCSPS*)s->sps_list[id]->data)
  78. s->sps = NULL;
  79. /* drop all PPS that depend on this SPS */
  80. for (i = 0; i < FF_ARRAY_ELEMS(s->pps_list); i++)
  81. if (s->pps_list[i] && ((HEVCPPS*)s->pps_list[i]->data)->sps_id == id)
  82. remove_pps(s, i);
  83. av_assert0(!(s->sps_list[id] && s->sps == (HEVCSPS*)s->sps_list[id]->data));
  84. }
  85. av_buffer_unref(&s->sps_list[id]);
  86. }
  87. static void remove_vps(HEVCParamSets *s, int id)
  88. {
  89. int i;
  90. if (s->vps_list[id]) {
  91. if (s->vps == (const HEVCVPS*)s->vps_list[id]->data)
  92. s->vps = NULL;
  93. for (i = 0; i < FF_ARRAY_ELEMS(s->sps_list); i++)
  94. if (s->sps_list[i] && ((HEVCSPS*)s->sps_list[i]->data)->vps_id == id)
  95. remove_sps(s, i);
  96. }
  97. av_buffer_unref(&s->vps_list[id]);
  98. }
/*
 * Parse one short-term reference picture set (st_ref_pic_set(), HEVC spec
 * section 7.3.7) either from an SPS or from a slice header.
 *
 * On success, rps->delta_poc[] holds the POC deltas sorted in increasing
 * order (negative pictures first, largest-magnitude first within them),
 * with matching rps->used[] flags, and the negative/total counts are set.
 *
 * @param rps             the RPS to fill
 * @param sps             SPS providing the previously decoded st_rps[] used
 *                        for inter-RPS prediction
 * @param is_slice_header nonzero when the RPS is coded in a slice header
 *                        (delta_idx is then read explicitly)
 * @return 0 on success, AVERROR_INVALIDDATA on invalid bitstream values
 */
int ff_hevc_decode_short_term_rps(GetBitContext *gb, AVCodecContext *avctx,
                                  ShortTermRPS *rps, const HEVCSPS *sps, int is_slice_header)
{
    uint8_t rps_predict = 0;
    int delta_poc;
    int k0 = 0;
    int k1 = 0;
    int k  = 0;
    int i;

    /* inter_ref_pic_set_prediction_flag: only present when there is a
     * previous RPS in the SPS to predict from */
    if (rps != sps->st_rps && sps->nb_st_rps)
        rps_predict = get_bits1(gb);

    if (rps_predict) {
        const ShortTermRPS *rps_ridx;
        int delta_rps;
        unsigned abs_delta_rps;
        uint8_t use_delta_flag = 0;
        uint8_t delta_rps_sign;

        if (is_slice_header) {
            /* delta_idx_minus1 selects the reference RPS counted back from
             * the end of the SPS list */
            unsigned int delta_idx = get_ue_golomb_long(gb) + 1;
            if (delta_idx > sps->nb_st_rps) {
                av_log(avctx, AV_LOG_ERROR,
                       "Invalid value of delta_idx in slice header RPS: %d > %d.\n",
                       delta_idx, sps->nb_st_rps);
                return AVERROR_INVALIDDATA;
            }
            rps_ridx = &sps->st_rps[sps->nb_st_rps - delta_idx];
            rps->rps_idx_num_delta_pocs = rps_ridx->num_delta_pocs;
        } else
            /* in the SPS the reference is always the immediately preceding RPS */
            rps_ridx = &sps->st_rps[rps - sps->st_rps - 1];

        delta_rps_sign = get_bits1(gb);
        abs_delta_rps  = get_ue_golomb_long(gb) + 1;
        if (abs_delta_rps < 1 || abs_delta_rps > 32768) {
            av_log(avctx, AV_LOG_ERROR,
                   "Invalid value of abs_delta_rps: %d\n",
                   abs_delta_rps);
            return AVERROR_INVALIDDATA;
        }
        delta_rps = (1 - (delta_rps_sign << 1)) * abs_delta_rps;

        /* one extra iteration (<=): the entry at num_delta_pocs covers
         * delta_rps itself with no reference delta added */
        for (i = 0; i <= rps_ridx->num_delta_pocs; i++) {
            int used = rps->used[k] = get_bits1(gb);

            if (!used)
                use_delta_flag = get_bits1(gb);

            if (used || use_delta_flag) {
                if (i < rps_ridx->num_delta_pocs)
                    delta_poc = delta_rps + rps_ridx->delta_poc[i];
                else
                    delta_poc = delta_rps;
                rps->delta_poc[k] = delta_poc;
                if (delta_poc < 0)
                    k0++;
                else
                    k1++;
                k++;
            }
        }

        rps->num_delta_pocs    = k;
        rps->num_negative_pics = k0;

        // sort in increasing order (smallest first)
        if (rps->num_delta_pocs != 0) {
            int used, tmp;
            /* insertion sort keeping used[] in sync with delta_poc[] */
            for (i = 1; i < rps->num_delta_pocs; i++) {
                delta_poc = rps->delta_poc[i];
                used      = rps->used[i];
                for (k = i - 1; k >= 0; k--) {
                    tmp = rps->delta_poc[k];
                    if (delta_poc < tmp) {
                        rps->delta_poc[k + 1] = tmp;
                        rps->used[k + 1]      = rps->used[k];
                        rps->delta_poc[k]     = delta_poc;
                        rps->used[k]          = used;
                    }
                }
            }
        }

        if ((rps->num_negative_pics >> 1) != 0) {
            int used;
            k = rps->num_negative_pics - 1;
            // flip the negative values to largest first
            for (i = 0; i < rps->num_negative_pics >> 1; i++) {
                delta_poc         = rps->delta_poc[i];
                used              = rps->used[i];
                rps->delta_poc[i] = rps->delta_poc[k];
                rps->used[i]      = rps->used[k];
                rps->delta_poc[k] = delta_poc;
                rps->used[k]      = used;
                k--;
            }
        }
    } else {
        /* explicit coding: negative then positive deltas, each stored as a
         * cumulative distance from the previous picture */
        unsigned int prev, nb_positive_pics;
        rps->num_negative_pics = get_ue_golomb_long(gb);
        nb_positive_pics       = get_ue_golomb_long(gb);

        if (rps->num_negative_pics >= MAX_REFS ||
            nb_positive_pics >= MAX_REFS) {
            av_log(avctx, AV_LOG_ERROR, "Too many refs in a short term RPS.\n");
            return AVERROR_INVALIDDATA;
        }

        rps->num_delta_pocs = rps->num_negative_pics + nb_positive_pics;
        if (rps->num_delta_pocs) {
            prev = 0;
            for (i = 0; i < rps->num_negative_pics; i++) {
                delta_poc = get_ue_golomb_long(gb) + 1;
                /* prev is unsigned; the wraparound yields the intended
                 * negative value once stored in the int delta_poc[] */
                prev -= delta_poc;
                rps->delta_poc[i] = prev;
                rps->used[i]      = get_bits1(gb);
            }
            prev = 0;
            for (i = 0; i < nb_positive_pics; i++) {
                delta_poc = get_ue_golomb_long(gb) + 1;
                prev += delta_poc;
                rps->delta_poc[rps->num_negative_pics + i] = prev;
                rps->used[rps->num_negative_pics + i]      = get_bits1(gb);
            }
        }
    }
    return 0;
}
/*
 * Parse the fixed-size part of a profile_tier_level() structure into ptl.
 * The trailing level_idc byte is read by the caller (parse_ptl()).
 *
 * @return 0 on success, -1 if the bitstream has fewer bits left than the
 *         fixed-size fields require
 */
static int decode_profile_tier_level(GetBitContext *gb, AVCodecContext *avctx,
                                     PTLCommon *ptl)
{
    int i;

    /* 2+1+5 profile bits, 32 compatibility flags, 4 source/constraint
     * flags, 44 reserved bits (16+16+12) */
    if (get_bits_left(gb) < 2+1+5 + 32 + 4 + 16 + 16 + 12)
        return -1;

    ptl->profile_space = get_bits(gb, 2);
    ptl->tier_flag     = get_bits1(gb);
    ptl->profile_idc   = get_bits(gb, 5);
    if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN)
        av_log(avctx, AV_LOG_DEBUG, "Main profile bitstream\n");
    else if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN_10)
        av_log(avctx, AV_LOG_DEBUG, "Main 10 profile bitstream\n");
    else if (ptl->profile_idc == FF_PROFILE_HEVC_MAIN_STILL_PICTURE)
        av_log(avctx, AV_LOG_DEBUG, "Main Still Picture profile bitstream\n");
    else if (ptl->profile_idc == FF_PROFILE_HEVC_REXT)
        av_log(avctx, AV_LOG_DEBUG, "Range Extension profile bitstream\n");
    else
        av_log(avctx, AV_LOG_WARNING, "Unknown HEVC profile: %d\n", ptl->profile_idc);

    for (i = 0; i < 32; i++) {
        ptl->profile_compatibility_flag[i] = get_bits1(gb);
        /* when profile_idc is 0, derive it from the first compatibility
         * flag that is set */
        if (ptl->profile_idc == 0 && i > 0 && ptl->profile_compatibility_flag[i])
            ptl->profile_idc = i;
    }

    ptl->progressive_source_flag    = get_bits1(gb);
    ptl->interlaced_source_flag     = get_bits1(gb);
    ptl->non_packed_constraint_flag = get_bits1(gb);
    ptl->frame_only_constraint_flag = get_bits1(gb);

    skip_bits(gb, 16); // XXX_reserved_zero_44bits[0..15]
    skip_bits(gb, 16); // XXX_reserved_zero_44bits[16..31]
    skip_bits(gb, 12); // XXX_reserved_zero_44bits[32..43]

    return 0;
}
/*
 * Parse a complete profile_tier_level() structure: the general PTL, its
 * level_idc, and the optional per-sublayer PTL/level information.
 *
 * @param max_num_sub_layers sps/vps_max_sub_layers (1..MAX_SUB_LAYERS);
 *                           sublayer info is coded for the first
 *                           max_num_sub_layers - 1 sublayers
 * @return 0 on success, -1 if the bitstream is too short
 */
static int parse_ptl(GetBitContext *gb, AVCodecContext *avctx,
                     PTL *ptl, int max_num_sub_layers)
{
    int i;

    if (decode_profile_tier_level(gb, avctx, &ptl->general_ptl) < 0 ||
        get_bits_left(gb) < 8 + 8*2) {
        av_log(avctx, AV_LOG_ERROR, "PTL information too short\n");
        return -1;
    }

    ptl->general_ptl.level_idc = get_bits(gb, 8);

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        ptl->sub_layer_profile_present_flag[i] = get_bits1(gb);
        ptl->sub_layer_level_present_flag[i]   = get_bits1(gb);
    }

    /* reserved_zero_2bits pad the present-flag pairs out to 8 sublayers */
    if (max_num_sub_layers - 1 > 0)
        for (i = max_num_sub_layers - 1; i < 8; i++)
            skip_bits(gb, 2); // reserved_zero_2bits[i]

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        if (ptl->sub_layer_profile_present_flag[i] &&
            decode_profile_tier_level(gb, avctx, &ptl->sub_layer_ptl[i]) < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "PTL information for sublayer %i too short\n", i);
            return -1;
        }
        if (ptl->sub_layer_level_present_flag[i]) {
            if (get_bits_left(gb) < 8) {
                av_log(avctx, AV_LOG_ERROR,
                       "Not enough data for sublayer %i level_idc\n", i);
                return -1;
            } else
                ptl->sub_layer_ptl[i].level_idc = get_bits(gb, 8);
        }
    }

    return 0;
}
  284. static void decode_sublayer_hrd(GetBitContext *gb, unsigned int nb_cpb,
  285. int subpic_params_present)
  286. {
  287. int i;
  288. for (i = 0; i < nb_cpb; i++) {
  289. get_ue_golomb_long(gb); // bit_rate_value_minus1
  290. get_ue_golomb_long(gb); // cpb_size_value_minus1
  291. if (subpic_params_present) {
  292. get_ue_golomb_long(gb); // cpb_size_du_value_minus1
  293. get_ue_golomb_long(gb); // bit_rate_du_value_minus1
  294. }
  295. skip_bits1(gb); // cbr_flag
  296. }
  297. }
/*
 * Parse hrd_parameters() (HEVC spec section E.2.2). All fields are read
 * and discarded except for basic validation of nb_cpb.
 *
 * @param common_inf_present commonInfPresentFlag from the spec
 * @param max_sublayers      number of sublayers to read HRD info for
 * @return 0 on success, AVERROR_INVALIDDATA if cpb_cnt is out of range
 */
static int decode_hrd(GetBitContext *gb, int common_inf_present,
                      int max_sublayers)
{
    int nal_params_present = 0, vcl_params_present = 0;
    int subpic_params_present = 0;
    int i;

    if (common_inf_present) {
        nal_params_present = get_bits1(gb);
        vcl_params_present = get_bits1(gb);

        if (nal_params_present || vcl_params_present) {
            subpic_params_present = get_bits1(gb);

            if (subpic_params_present) {
                skip_bits(gb, 8); // tick_divisor_minus2
                skip_bits(gb, 5); // du_cpb_removal_delay_increment_length_minus1
                skip_bits(gb, 1); // sub_pic_cpb_params_in_pic_timing_sei_flag
                skip_bits(gb, 5); // dpb_output_delay_du_length_minus1
            }

            skip_bits(gb, 4); // bit_rate_scale
            skip_bits(gb, 4); // cpb_size_scale

            if (subpic_params_present)
                skip_bits(gb, 4); // cpb_size_du_scale

            skip_bits(gb, 5); // initial_cpb_removal_delay_length_minus1
            skip_bits(gb, 5); // au_cpb_removal_delay_length_minus1
            skip_bits(gb, 5); // dpb_output_delay_length_minus1
        }
    }

    for (i = 0; i < max_sublayers; i++) {
        int low_delay = 0;
        unsigned int nb_cpb = 1;
        /* fixed_pic_rate_general_flag; if unset, fixed_pic_rate_within_cvs_flag */
        int fixed_rate = get_bits1(gb);

        if (!fixed_rate)
            fixed_rate = get_bits1(gb);

        if (fixed_rate)
            get_ue_golomb_long(gb); // elemental_duration_in_tc_minus1
        else
            low_delay = get_bits1(gb);

        if (!low_delay) {
            nb_cpb = get_ue_golomb_long(gb) + 1;
            if (nb_cpb < 1 || nb_cpb > 32) {
                av_log(NULL, AV_LOG_ERROR, "nb_cpb %d invalid\n", nb_cpb);
                return AVERROR_INVALIDDATA;
            }
        }

        if (nal_params_present)
            decode_sublayer_hrd(gb, nb_cpb, subpic_params_present);
        if (vcl_params_present)
            decode_sublayer_hrd(gb, nb_cpb, subpic_params_present);
    }
    return 0;
}
/*
 * Decode a VPS NAL unit into a new buffer and store it in ps->vps_list.
 * If an identical VPS with the same id is already stored, the new buffer
 * is dropped and the stored one kept; otherwise the old entry (and its
 * dependent SPS/PPS) is removed and replaced.
 *
 * @return 0 on success, AVERROR(ENOMEM) or AVERROR_INVALIDDATA on failure
 */
int ff_hevc_decode_nal_vps(GetBitContext *gb, AVCodecContext *avctx,
                           HEVCParamSets *ps)
{
    int i,j;
    int vps_id = 0;
    HEVCVPS *vps;
    AVBufferRef *vps_buf = av_buffer_allocz(sizeof(*vps));

    if (!vps_buf)
        return AVERROR(ENOMEM);
    vps = (HEVCVPS*)vps_buf->data;

    av_log(avctx, AV_LOG_DEBUG, "Decoding VPS\n");

    vps_id = get_bits(gb, 4);
    if (vps_id >= MAX_VPS_COUNT) {
        av_log(avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", vps_id);
        goto err;
    }

    if (get_bits(gb, 2) != 3) { // vps_reserved_three_2bits
        av_log(avctx, AV_LOG_ERROR, "vps_reserved_three_2bits is not three\n");
        goto err;
    }

    vps->vps_max_layers               = get_bits(gb, 6) + 1;
    vps->vps_max_sub_layers           = get_bits(gb, 3) + 1;
    vps->vps_temporal_id_nesting_flag = get_bits1(gb);

    if (get_bits(gb, 16) != 0xffff) { // vps_reserved_ffff_16bits
        av_log(avctx, AV_LOG_ERROR, "vps_reserved_ffff_16bits is not 0xffff\n");
        goto err;
    }

    if (vps->vps_max_sub_layers > MAX_SUB_LAYERS) {
        av_log(avctx, AV_LOG_ERROR, "vps_max_sub_layers out of range: %d\n",
               vps->vps_max_sub_layers);
        goto err;
    }

    if (parse_ptl(gb, avctx, &vps->ptl, vps->vps_max_sub_layers) < 0)
        goto err;

    vps->vps_sub_layer_ordering_info_present_flag = get_bits1(gb);

    /* when the per-sublayer info is absent, only the entry for the highest
     * sublayer is coded and applies to all of them */
    i = vps->vps_sub_layer_ordering_info_present_flag ? 0 : vps->vps_max_sub_layers - 1;
    for (; i < vps->vps_max_sub_layers; i++) {
        vps->vps_max_dec_pic_buffering[i] = get_ue_golomb_long(gb) + 1;
        vps->vps_num_reorder_pics[i]      = get_ue_golomb_long(gb);
        vps->vps_max_latency_increase[i]  = get_ue_golomb_long(gb) - 1;

        if (vps->vps_max_dec_pic_buffering[i] > MAX_DPB_SIZE || !vps->vps_max_dec_pic_buffering[i]) {
            av_log(avctx, AV_LOG_ERROR, "vps_max_dec_pic_buffering_minus1 out of range: %d\n",
                   vps->vps_max_dec_pic_buffering[i] - 1);
            goto err;
        }
        if (vps->vps_num_reorder_pics[i] > vps->vps_max_dec_pic_buffering[i] - 1) {
            av_log(avctx, AV_LOG_WARNING, "vps_max_num_reorder_pics out of range: %d\n",
                   vps->vps_num_reorder_pics[i]);
            if (avctx->err_recognition & AV_EF_EXPLODE)
                goto err;
        }
    }

    vps->vps_max_layer_id   = get_bits(gb, 6);
    vps->vps_num_layer_sets = get_ue_golomb_long(gb) + 1;
    /* each additional layer set carries (max_layer_id + 1) flag bits; reject
     * counts that cannot fit in the remaining bitstream */
    if (vps->vps_num_layer_sets < 1 || vps->vps_num_layer_sets > 1024 ||
        (vps->vps_num_layer_sets - 1LL) * (vps->vps_max_layer_id + 1LL) > get_bits_left(gb)) {
        av_log(avctx, AV_LOG_ERROR, "too many layer_id_included_flags\n");
        goto err;
    }

    for (i = 1; i < vps->vps_num_layer_sets; i++)
        for (j = 0; j <= vps->vps_max_layer_id; j++)
            skip_bits(gb, 1); // layer_id_included_flag[i][j]

    vps->vps_timing_info_present_flag = get_bits1(gb);
    if (vps->vps_timing_info_present_flag) {
        vps->vps_num_units_in_tick               = get_bits_long(gb, 32);
        vps->vps_time_scale                      = get_bits_long(gb, 32);
        vps->vps_poc_proportional_to_timing_flag = get_bits1(gb);
        if (vps->vps_poc_proportional_to_timing_flag)
            vps->vps_num_ticks_poc_diff_one = get_ue_golomb_long(gb) + 1;
        vps->vps_num_hrd_parameters = get_ue_golomb_long(gb);
        if (vps->vps_num_hrd_parameters > (unsigned)vps->vps_num_layer_sets) {
            av_log(avctx, AV_LOG_ERROR,
                   "vps_num_hrd_parameters %d is invalid\n", vps->vps_num_hrd_parameters);
            goto err;
        }
        for (i = 0; i < vps->vps_num_hrd_parameters; i++) {
            int common_inf_present = 1;

            get_ue_golomb_long(gb); // hrd_layer_set_idx
            if (i)
                common_inf_present = get_bits1(gb);
            decode_hrd(gb, common_inf_present, vps->vps_max_sub_layers);
        }
    }
    get_bits1(gb); /* vps_extension_flag */

    /* on overread, keep the new VPS only if no previous VPS with this id
     * exists to fall back to */
    if (get_bits_left(gb) < 0) {
        av_log(avctx, AV_LOG_ERROR,
               "Overread VPS by %d bits\n", -get_bits_left(gb));
        if (ps->vps_list[vps_id])
            goto err;
    }

    if (ps->vps_list[vps_id] &&
        !memcmp(ps->vps_list[vps_id]->data, vps_buf->data, vps_buf->size)) {
        /* identical to the stored VPS: keep the old buffer */
        av_buffer_unref(&vps_buf);
    } else {
        remove_vps(ps, vps_id);
        ps->vps_list[vps_id] = vps_buf;
    }

    return 0;

err:
    av_buffer_unref(&vps_buf);
    return AVERROR_INVALIDDATA;
}
/*
 * Parse vui_parameters() (HEVC spec Annex E) into sps->vui.
 *
 * Includes a workaround for streams with an alternate VUI layout: the
 * parser state is saved before the default display window, and if the
 * timing information then looks truncated, parsing is retried from the
 * saved position without the display window.
 *
 * @param apply_defdispwin nonzero when the default display window should be
 *                         applied (it is then subject to IGNORE_CROP)
 */
static void decode_vui(GetBitContext *gb, AVCodecContext *avctx,
                       int apply_defdispwin, HEVCSPS *sps)
{
    VUI *vui          = &sps->vui;
    GetBitContext backup;
    int sar_present, alt = 0;

    av_log(avctx, AV_LOG_DEBUG, "Decoding VUI\n");

    sar_present = get_bits1(gb);
    if (sar_present) {
        uint8_t sar_idx = get_bits(gb, 8);
        if (sar_idx < FF_ARRAY_ELEMS(vui_sar))
            vui->sar = vui_sar[sar_idx];
        else if (sar_idx == 255) {
            /* EXTENDED_SAR: explicit width/height */
            vui->sar.num = get_bits(gb, 16);
            vui->sar.den = get_bits(gb, 16);
        } else
            av_log(avctx, AV_LOG_WARNING,
                   "Unknown SAR index: %u.\n", sar_idx);
    }

    vui->overscan_info_present_flag = get_bits1(gb);
    if (vui->overscan_info_present_flag)
        vui->overscan_appropriate_flag = get_bits1(gb);

    vui->video_signal_type_present_flag = get_bits1(gb);
    if (vui->video_signal_type_present_flag) {
        vui->video_format                    = get_bits(gb, 3);
        vui->video_full_range_flag           = get_bits1(gb);
        vui->colour_description_present_flag = get_bits1(gb);
        /* full-range 4:2:0 8-bit maps to the legacy JPEG-range pixel format */
        if (vui->video_full_range_flag && sps->pix_fmt == AV_PIX_FMT_YUV420P)
            sps->pix_fmt = AV_PIX_FMT_YUVJ420P;
        if (vui->colour_description_present_flag) {
            vui->colour_primaries        = get_bits(gb, 8);
            vui->transfer_characteristic = get_bits(gb, 8);
            vui->matrix_coeffs           = get_bits(gb, 8);

            // Set invalid values to "unspecified"
            if (vui->colour_primaries >= AVCOL_PRI_NB)
                vui->colour_primaries = AVCOL_PRI_UNSPECIFIED;
            if (vui->transfer_characteristic >= AVCOL_TRC_NB)
                vui->transfer_characteristic = AVCOL_TRC_UNSPECIFIED;
            if (vui->matrix_coeffs >= AVCOL_SPC_NB)
                vui->matrix_coeffs = AVCOL_SPC_UNSPECIFIED;

            /* RGB (GBR) content coded as 4:4:4 uses the planar GBR formats */
            if (vui->matrix_coeffs == AVCOL_SPC_RGB) {
                switch (sps->pix_fmt) {
                case AV_PIX_FMT_YUV444P:
                    sps->pix_fmt = AV_PIX_FMT_GBRP;
                    break;
                case AV_PIX_FMT_YUV444P10:
                    sps->pix_fmt = AV_PIX_FMT_GBRP10;
                    break;
                case AV_PIX_FMT_YUV444P12:
                    sps->pix_fmt = AV_PIX_FMT_GBRP12;
                    break;
                }
            }
        }
    }

    vui->chroma_loc_info_present_flag = get_bits1(gb);
    if (vui->chroma_loc_info_present_flag) {
        vui->chroma_sample_loc_type_top_field    = get_ue_golomb_long(gb);
        vui->chroma_sample_loc_type_bottom_field = get_ue_golomb_long(gb);
    }

    vui->neutra_chroma_indication_flag = get_bits1(gb);
    vui->field_seq_flag                = get_bits1(gb);
    vui->frame_field_info_present_flag = get_bits1(gb);

    /* heuristic for a known broken layout: a 0x100000 bit pattern here
     * indicates the display-window flag position is bogus — TODO confirm
     * which encoder produces these streams */
    if (get_bits_left(gb) >= 68 && show_bits_long(gb, 21) == 0x100000) {
        vui->default_display_window_flag = 0;
        av_log(avctx, AV_LOG_WARNING, "Invalid default display window\n");
    } else
        vui->default_display_window_flag = get_bits1(gb);
    // Backup context in case an alternate header is detected
    memcpy(&backup, gb, sizeof(backup));

    if (vui->default_display_window_flag) {
        /* offsets are coded in chroma units; scale to luma samples */
        int vert_mult  = 1 + (sps->chroma_format_idc < 2);
        int horiz_mult = 1 + (sps->chroma_format_idc < 3);
        vui->def_disp_win.left_offset   = get_ue_golomb_long(gb) * horiz_mult;
        vui->def_disp_win.right_offset  = get_ue_golomb_long(gb) * horiz_mult;
        vui->def_disp_win.top_offset    = get_ue_golomb_long(gb) *  vert_mult;
        vui->def_disp_win.bottom_offset = get_ue_golomb_long(gb) *  vert_mult;

        if (apply_defdispwin &&
            avctx->flags2 & AV_CODEC_FLAG2_IGNORE_CROP) {
            av_log(avctx, AV_LOG_DEBUG,
                   "discarding vui default display window, "
                   "original values are l:%u r:%u t:%u b:%u\n",
                   vui->def_disp_win.left_offset,
                   vui->def_disp_win.right_offset,
                   vui->def_disp_win.top_offset,
                   vui->def_disp_win.bottom_offset);

            vui->def_disp_win.left_offset   =
            vui->def_disp_win.right_offset  =
            vui->def_disp_win.top_offset    =
            vui->def_disp_win.bottom_offset = 0;
        }
    }

    vui->vui_timing_info_present_flag = get_bits1(gb);

    if (vui->vui_timing_info_present_flag) {
        if( get_bits_left(gb) < 66) {
            // The alternate syntax seem to have timing info located
            // at where def_disp_win is normally located
            av_log(avctx, AV_LOG_WARNING,
                   "Strange VUI timing information, retrying...\n");
            vui->default_display_window_flag = 0;
            memset(&vui->def_disp_win, 0, sizeof(vui->def_disp_win));
            memcpy(gb, &backup, sizeof(backup));
            alt = 1;
        }
        vui->vui_num_units_in_tick               = get_bits_long(gb, 32);
        vui->vui_time_scale                      = get_bits_long(gb, 32);
        if (alt) {
            av_log(avctx, AV_LOG_INFO, "Retry got %i/%i fps\n",
                   vui->vui_time_scale, vui->vui_num_units_in_tick);
        }
        vui->vui_poc_proportional_to_timing_flag = get_bits1(gb);
        if (vui->vui_poc_proportional_to_timing_flag)
            vui->vui_num_ticks_poc_diff_one_minus1 = get_ue_golomb_long(gb);
        vui->vui_hrd_parameters_present_flag = get_bits1(gb);
        if (vui->vui_hrd_parameters_present_flag)
            decode_hrd(gb, 1, sps->max_sub_layers);
    }

    vui->bitstream_restriction_flag = get_bits1(gb);
    if (vui->bitstream_restriction_flag) {
        vui->tiles_fixed_structure_flag              = get_bits1(gb);
        vui->motion_vectors_over_pic_boundaries_flag = get_bits1(gb);
        vui->restricted_ref_pic_lists_flag           = get_bits1(gb);
        vui->min_spatial_segmentation_idc            = get_ue_golomb_long(gb);
        vui->max_bytes_per_pic_denom                 = get_ue_golomb_long(gb);
        vui->max_bits_per_min_cu_denom               = get_ue_golomb_long(gb);
        vui->log2_max_mv_length_horizontal           = get_ue_golomb_long(gb);
        vui->log2_max_mv_length_vertical             = get_ue_golomb_long(gb);
    }
}
  579. static void set_default_scaling_list_data(ScalingList *sl)
  580. {
  581. int matrixId;
  582. for (matrixId = 0; matrixId < 6; matrixId++) {
  583. // 4x4 default is 16
  584. memset(sl->sl[0][matrixId], 16, 16);
  585. sl->sl_dc[0][matrixId] = 16; // default for 16x16
  586. sl->sl_dc[1][matrixId] = 16; // default for 32x32
  587. }
  588. memcpy(sl->sl[1][0], default_scaling_list_intra, 64);
  589. memcpy(sl->sl[1][1], default_scaling_list_intra, 64);
  590. memcpy(sl->sl[1][2], default_scaling_list_intra, 64);
  591. memcpy(sl->sl[1][3], default_scaling_list_inter, 64);
  592. memcpy(sl->sl[1][4], default_scaling_list_inter, 64);
  593. memcpy(sl->sl[1][5], default_scaling_list_inter, 64);
  594. memcpy(sl->sl[2][0], default_scaling_list_intra, 64);
  595. memcpy(sl->sl[2][1], default_scaling_list_intra, 64);
  596. memcpy(sl->sl[2][2], default_scaling_list_intra, 64);
  597. memcpy(sl->sl[2][3], default_scaling_list_inter, 64);
  598. memcpy(sl->sl[2][4], default_scaling_list_inter, 64);
  599. memcpy(sl->sl[2][5], default_scaling_list_inter, 64);
  600. memcpy(sl->sl[3][0], default_scaling_list_intra, 64);
  601. memcpy(sl->sl[3][1], default_scaling_list_intra, 64);
  602. memcpy(sl->sl[3][2], default_scaling_list_intra, 64);
  603. memcpy(sl->sl[3][3], default_scaling_list_inter, 64);
  604. memcpy(sl->sl[3][4], default_scaling_list_inter, 64);
  605. memcpy(sl->sl[3][5], default_scaling_list_inter, 64);
  606. }
/*
 * Parse scaling_list_data() (HEVC spec section 7.3.4) into sl, which must
 * already hold valid (e.g. default) matrices since a zero prediction delta
 * leaves the existing entry untouched.
 *
 * @return 0 on success, AVERROR_INVALIDDATA on an invalid prediction delta
 */
static int scaling_list_data(GetBitContext *gb, AVCodecContext *avctx, ScalingList *sl, HEVCSPS *sps)
{
    uint8_t scaling_list_pred_mode_flag;
    int32_t scaling_list_dc_coef[2][6];
    int size_id, matrix_id, pos;
    int i;

    /* for 32x32 (size_id 3) only matrices 0 and 3 are coded */
    for (size_id = 0; size_id < 4; size_id++)
        for (matrix_id = 0; matrix_id < 6; matrix_id += ((size_id == 3) ? 3 : 1)) {
            scaling_list_pred_mode_flag = get_bits1(gb);
            if (!scaling_list_pred_mode_flag) {
                unsigned int delta = get_ue_golomb_long(gb);
                /* Only need to handle non-zero delta. Zero means default,
                 * which should already be in the arrays. */
                if (delta) {
                    // Copy from previous array.
                    if (matrix_id < delta) {
                        av_log(avctx, AV_LOG_ERROR,
                               "Invalid delta in scaling list data: %d.\n", delta);
                        return AVERROR_INVALIDDATA;
                    }

                    memcpy(sl->sl[size_id][matrix_id],
                           sl->sl[size_id][matrix_id - delta],
                           size_id > 0 ? 64 : 16);
                    if (size_id > 1)
                        sl->sl_dc[size_id - 2][matrix_id] = sl->sl_dc[size_id - 2][matrix_id - delta];
                }
            } else {
                /* explicit coding: coefficients are delta-coded in diagonal
                 * scan order starting from next_coef = 8 */
                int next_coef, coef_num;
                int32_t scaling_list_delta_coef;

                next_coef = 8;
                coef_num  = FFMIN(64, 1 << (4 + (size_id << 1)));
                if (size_id > 1) {
                    scaling_list_dc_coef[size_id - 2][matrix_id] = get_se_golomb(gb) + 8;
                    next_coef = scaling_list_dc_coef[size_id - 2][matrix_id];
                    sl->sl_dc[size_id - 2][matrix_id] = next_coef;
                }
                for (i = 0; i < coef_num; i++) {
                    if (size_id == 0)
                        pos = 4 * ff_hevc_diag_scan4x4_y[i] +
                                  ff_hevc_diag_scan4x4_x[i];
                    else
                        pos = 8 * ff_hevc_diag_scan8x8_y[i] +
                                  ff_hevc_diag_scan8x8_x[i];

                    scaling_list_delta_coef = get_se_golomb(gb);
                    next_coef = (next_coef + scaling_list_delta_coef + 256) % 256;
                    sl->sl[size_id][matrix_id][pos] = next_coef;
                }
            }
        }

    /* for 4:4:4, the uncoded 32x32 chroma matrices are copied from the
     * corresponding 16x16 ones */
    if (sps->chroma_format_idc == 3) {
        for (i = 0; i < 64; i++) {
            sl->sl[3][1][i] = sl->sl[2][1][i];
            sl->sl[3][2][i] = sl->sl[2][2][i];
            sl->sl[3][4][i] = sl->sl[2][4][i];
            sl->sl[3][5][i] = sl->sl[2][5][i];
        }
        sl->sl_dc[1][1] = sl->sl_dc[0][1];
        sl->sl_dc[1][2] = sl->sl_dc[0][2];
        sl->sl_dc[1][4] = sl->sl_dc[0][4];
        sl->sl_dc[1][5] = sl->sl_dc[0][5];
    }

    return 0;
}
  670. static int map_pixel_format(AVCodecContext *avctx, HEVCSPS *sps)
  671. {
  672. const AVPixFmtDescriptor *desc;
  673. switch (sps->bit_depth) {
  674. case 8:
  675. if (sps->chroma_format_idc == 0) sps->pix_fmt = AV_PIX_FMT_GRAY8;
  676. if (sps->chroma_format_idc == 1) sps->pix_fmt = AV_PIX_FMT_YUV420P;
  677. if (sps->chroma_format_idc == 2) sps->pix_fmt = AV_PIX_FMT_YUV422P;
  678. if (sps->chroma_format_idc == 3) sps->pix_fmt = AV_PIX_FMT_YUV444P;
  679. break;
  680. case 9:
  681. if (sps->chroma_format_idc == 0) sps->pix_fmt = AV_PIX_FMT_GRAY16;
  682. if (sps->chroma_format_idc == 1) sps->pix_fmt = AV_PIX_FMT_YUV420P9;
  683. if (sps->chroma_format_idc == 2) sps->pix_fmt = AV_PIX_FMT_YUV422P9;
  684. if (sps->chroma_format_idc == 3) sps->pix_fmt = AV_PIX_FMT_YUV444P9;
  685. break;
  686. case 10:
  687. if (sps->chroma_format_idc == 0) sps->pix_fmt = AV_PIX_FMT_GRAY16;
  688. if (sps->chroma_format_idc == 1) sps->pix_fmt = AV_PIX_FMT_YUV420P10;
  689. if (sps->chroma_format_idc == 2) sps->pix_fmt = AV_PIX_FMT_YUV422P10;
  690. if (sps->chroma_format_idc == 3) sps->pix_fmt = AV_PIX_FMT_YUV444P10;
  691. break;
  692. case 12:
  693. if (sps->chroma_format_idc == 0) sps->pix_fmt = AV_PIX_FMT_GRAY16;
  694. if (sps->chroma_format_idc == 1) sps->pix_fmt = AV_PIX_FMT_YUV420P12;
  695. if (sps->chroma_format_idc == 2) sps->pix_fmt = AV_PIX_FMT_YUV422P12;
  696. if (sps->chroma_format_idc == 3) sps->pix_fmt = AV_PIX_FMT_YUV444P12;
  697. break;
  698. default:
  699. av_log(avctx, AV_LOG_ERROR,
  700. "4:2:0, 4:2:2, 4:4:4 supports are currently specified for 8, 10 and 12 bits.\n");
  701. av_log(avctx, AV_LOG_ERROR,
  702. "chroma_format_idc is %d, depth is %d",
  703. sps->chroma_format_idc, sps->bit_depth);
  704. return AVERROR_INVALIDDATA;
  705. }
  706. desc = av_pix_fmt_desc_get(sps->pix_fmt);
  707. if (!desc)
  708. return AVERROR(EINVAL);
  709. sps->hshift[0] = sps->vshift[0] = 0;
  710. sps->hshift[2] = sps->hshift[1] = desc->log2_chroma_w;
  711. sps->vshift[2] = sps->vshift[1] = desc->log2_chroma_h;
  712. sps->pixel_shift = sps->bit_depth > 8;
  713. return 0;
  714. }
  715. int ff_hevc_parse_sps(HEVCSPS *sps, GetBitContext *gb, unsigned int *sps_id,
  716. int apply_defdispwin, AVBufferRef **vps_list, AVCodecContext *avctx)
  717. {
  718. int ret = 0;
  719. int log2_diff_max_min_transform_block_size;
  720. int bit_depth_chroma, start, vui_present, sublayer_ordering_info;
  721. int i;
  722. // Coded parameters
  723. sps->vps_id = get_bits(gb, 4);
  724. if (sps->vps_id >= MAX_VPS_COUNT) {
  725. av_log(avctx, AV_LOG_ERROR, "VPS id out of range: %d\n", sps->vps_id);
  726. return AVERROR_INVALIDDATA;
  727. }
  728. if (vps_list && !vps_list[sps->vps_id]) {
  729. av_log(avctx, AV_LOG_ERROR, "VPS %d does not exist\n",
  730. sps->vps_id);
  731. return AVERROR_INVALIDDATA;
  732. }
  733. sps->max_sub_layers = get_bits(gb, 3) + 1;
  734. if (sps->max_sub_layers > MAX_SUB_LAYERS) {
  735. av_log(avctx, AV_LOG_ERROR, "sps_max_sub_layers out of range: %d\n",
  736. sps->max_sub_layers);
  737. return AVERROR_INVALIDDATA;
  738. }
  739. skip_bits1(gb); // temporal_id_nesting_flag
  740. if ((ret = parse_ptl(gb, avctx, &sps->ptl, sps->max_sub_layers)) < 0)
  741. return ret;
  742. *sps_id = get_ue_golomb_long(gb);
  743. if (*sps_id >= MAX_SPS_COUNT) {
  744. av_log(avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", *sps_id);
  745. return AVERROR_INVALIDDATA;
  746. }
  747. sps->chroma_format_idc = get_ue_golomb_long(gb);
  748. if (sps->chroma_format_idc > 3U) {
  749. av_log(avctx, AV_LOG_ERROR, "chroma_format_idc %d is invalid\n", sps->chroma_format_idc);
  750. return AVERROR_INVALIDDATA;
  751. }
  752. if (sps->chroma_format_idc == 3)
  753. sps->separate_colour_plane_flag = get_bits1(gb);
  754. if (sps->separate_colour_plane_flag)
  755. sps->chroma_format_idc = 0;
  756. sps->width = get_ue_golomb_long(gb);
  757. sps->height = get_ue_golomb_long(gb);
  758. if ((ret = av_image_check_size(sps->width,
  759. sps->height, 0, avctx)) < 0)
  760. return ret;
  761. if (get_bits1(gb)) { // pic_conformance_flag
  762. int vert_mult = 1 + (sps->chroma_format_idc < 2);
  763. int horiz_mult = 1 + (sps->chroma_format_idc < 3);
  764. sps->pic_conf_win.left_offset = get_ue_golomb_long(gb) * horiz_mult;
  765. sps->pic_conf_win.right_offset = get_ue_golomb_long(gb) * horiz_mult;
  766. sps->pic_conf_win.top_offset = get_ue_golomb_long(gb) * vert_mult;
  767. sps->pic_conf_win.bottom_offset = get_ue_golomb_long(gb) * vert_mult;
  768. if (avctx->flags2 & AV_CODEC_FLAG2_IGNORE_CROP) {
  769. av_log(avctx, AV_LOG_DEBUG,
  770. "discarding sps conformance window, "
  771. "original values are l:%u r:%u t:%u b:%u\n",
  772. sps->pic_conf_win.left_offset,
  773. sps->pic_conf_win.right_offset,
  774. sps->pic_conf_win.top_offset,
  775. sps->pic_conf_win.bottom_offset);
  776. sps->pic_conf_win.left_offset =
  777. sps->pic_conf_win.right_offset =
  778. sps->pic_conf_win.top_offset =
  779. sps->pic_conf_win.bottom_offset = 0;
  780. }
  781. sps->output_window = sps->pic_conf_win;
  782. }
  783. sps->bit_depth = get_ue_golomb_long(gb) + 8;
  784. bit_depth_chroma = get_ue_golomb_long(gb) + 8;
  785. if (sps->chroma_format_idc && bit_depth_chroma != sps->bit_depth) {
  786. av_log(avctx, AV_LOG_ERROR,
  787. "Luma bit depth (%d) is different from chroma bit depth (%d), "
  788. "this is unsupported.\n",
  789. sps->bit_depth, bit_depth_chroma);
  790. return AVERROR_INVALIDDATA;
  791. }
  792. ret = map_pixel_format(avctx, sps);
  793. if (ret < 0)
  794. return ret;
  795. sps->log2_max_poc_lsb = get_ue_golomb_long(gb) + 4;
  796. if (sps->log2_max_poc_lsb > 16) {
  797. av_log(avctx, AV_LOG_ERROR, "log2_max_pic_order_cnt_lsb_minus4 out range: %d\n",
  798. sps->log2_max_poc_lsb - 4);
  799. return AVERROR_INVALIDDATA;
  800. }
  801. sublayer_ordering_info = get_bits1(gb);
  802. start = sublayer_ordering_info ? 0 : sps->max_sub_layers - 1;
  803. for (i = start; i < sps->max_sub_layers; i++) {
  804. sps->temporal_layer[i].max_dec_pic_buffering = get_ue_golomb_long(gb) + 1;
  805. sps->temporal_layer[i].num_reorder_pics = get_ue_golomb_long(gb);
  806. sps->temporal_layer[i].max_latency_increase = get_ue_golomb_long(gb) - 1;
  807. if (sps->temporal_layer[i].max_dec_pic_buffering > MAX_DPB_SIZE) {
  808. av_log(avctx, AV_LOG_ERROR, "sps_max_dec_pic_buffering_minus1 out of range: %d\n",
  809. sps->temporal_layer[i].max_dec_pic_buffering - 1);
  810. return AVERROR_INVALIDDATA;
  811. }
  812. if (sps->temporal_layer[i].num_reorder_pics > sps->temporal_layer[i].max_dec_pic_buffering - 1) {
  813. av_log(avctx, AV_LOG_WARNING, "sps_max_num_reorder_pics out of range: %d\n",
  814. sps->temporal_layer[i].num_reorder_pics);
  815. if (avctx->err_recognition & AV_EF_EXPLODE ||
  816. sps->temporal_layer[i].num_reorder_pics > MAX_DPB_SIZE - 1) {
  817. return AVERROR_INVALIDDATA;
  818. }
  819. sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[i].num_reorder_pics + 1;
  820. }
  821. }
  822. if (!sublayer_ordering_info) {
  823. for (i = 0; i < start; i++) {
  824. sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[start].max_dec_pic_buffering;
  825. sps->temporal_layer[i].num_reorder_pics = sps->temporal_layer[start].num_reorder_pics;
  826. sps->temporal_layer[i].max_latency_increase = sps->temporal_layer[start].max_latency_increase;
  827. }
  828. }
  829. sps->log2_min_cb_size = get_ue_golomb_long(gb) + 3;
  830. sps->log2_diff_max_min_coding_block_size = get_ue_golomb_long(gb);
  831. sps->log2_min_tb_size = get_ue_golomb_long(gb) + 2;
  832. log2_diff_max_min_transform_block_size = get_ue_golomb_long(gb);
  833. sps->log2_max_trafo_size = log2_diff_max_min_transform_block_size +
  834. sps->log2_min_tb_size;
  835. if (sps->log2_min_cb_size < 3 || sps->log2_min_cb_size > 30) {
  836. av_log(avctx, AV_LOG_ERROR, "Invalid value %d for log2_min_cb_size", sps->log2_min_cb_size);
  837. return AVERROR_INVALIDDATA;
  838. }
  839. if (sps->log2_diff_max_min_coding_block_size > 30) {
  840. av_log(avctx, AV_LOG_ERROR, "Invalid value %d for log2_diff_max_min_coding_block_size", sps->log2_diff_max_min_coding_block_size);
  841. return AVERROR_INVALIDDATA;
  842. }
  843. if (sps->log2_min_tb_size >= sps->log2_min_cb_size || sps->log2_min_tb_size < 2) {
  844. av_log(avctx, AV_LOG_ERROR, "Invalid value for log2_min_tb_size");
  845. return AVERROR_INVALIDDATA;
  846. }
  847. if (log2_diff_max_min_transform_block_size < 0 || log2_diff_max_min_transform_block_size > 30) {
  848. av_log(avctx, AV_LOG_ERROR, "Invalid value %d for log2_diff_max_min_transform_block_size", log2_diff_max_min_transform_block_size);
  849. return AVERROR_INVALIDDATA;
  850. }
  851. sps->max_transform_hierarchy_depth_inter = get_ue_golomb_long(gb);
  852. sps->max_transform_hierarchy_depth_intra = get_ue_golomb_long(gb);
  853. sps->scaling_list_enable_flag = get_bits1(gb);
  854. if (sps->scaling_list_enable_flag) {
  855. set_default_scaling_list_data(&sps->scaling_list);
  856. if (get_bits1(gb)) {
  857. ret = scaling_list_data(gb, avctx, &sps->scaling_list, sps);
  858. if (ret < 0)
  859. return ret;
  860. }
  861. }
  862. sps->amp_enabled_flag = get_bits1(gb);
  863. sps->sao_enabled = get_bits1(gb);
  864. sps->pcm_enabled_flag = get_bits1(gb);
  865. if (sps->pcm_enabled_flag) {
  866. sps->pcm.bit_depth = get_bits(gb, 4) + 1;
  867. sps->pcm.bit_depth_chroma = get_bits(gb, 4) + 1;
  868. sps->pcm.log2_min_pcm_cb_size = get_ue_golomb_long(gb) + 3;
  869. sps->pcm.log2_max_pcm_cb_size = sps->pcm.log2_min_pcm_cb_size +
  870. get_ue_golomb_long(gb);
  871. if (sps->pcm.bit_depth > sps->bit_depth) {
  872. av_log(avctx, AV_LOG_ERROR,
  873. "PCM bit depth (%d) is greater than normal bit depth (%d)\n",
  874. sps->pcm.bit_depth, sps->bit_depth);
  875. return AVERROR_INVALIDDATA;
  876. }
  877. sps->pcm.loop_filter_disable_flag = get_bits1(gb);
  878. }
  879. sps->nb_st_rps = get_ue_golomb_long(gb);
  880. if (sps->nb_st_rps > MAX_SHORT_TERM_RPS_COUNT) {
  881. av_log(avctx, AV_LOG_ERROR, "Too many short term RPS: %d.\n",
  882. sps->nb_st_rps);
  883. return AVERROR_INVALIDDATA;
  884. }
  885. for (i = 0; i < sps->nb_st_rps; i++) {
  886. if ((ret = ff_hevc_decode_short_term_rps(gb, avctx, &sps->st_rps[i],
  887. sps, 0)) < 0)
  888. return ret;
  889. }
  890. sps->long_term_ref_pics_present_flag = get_bits1(gb);
  891. if (sps->long_term_ref_pics_present_flag) {
  892. sps->num_long_term_ref_pics_sps = get_ue_golomb_long(gb);
  893. if (sps->num_long_term_ref_pics_sps > 31U) {
  894. av_log(avctx, AV_LOG_ERROR, "num_long_term_ref_pics_sps %d is out of range.\n",
  895. sps->num_long_term_ref_pics_sps);
  896. return AVERROR_INVALIDDATA;
  897. }
  898. for (i = 0; i < sps->num_long_term_ref_pics_sps; i++) {
  899. sps->lt_ref_pic_poc_lsb_sps[i] = get_bits(gb, sps->log2_max_poc_lsb);
  900. sps->used_by_curr_pic_lt_sps_flag[i] = get_bits1(gb);
  901. }
  902. }
  903. sps->sps_temporal_mvp_enabled_flag = get_bits1(gb);
  904. sps->sps_strong_intra_smoothing_enable_flag = get_bits1(gb);
  905. sps->vui.sar = (AVRational){0, 1};
  906. vui_present = get_bits1(gb);
  907. if (vui_present)
  908. decode_vui(gb, avctx, apply_defdispwin, sps);
  909. if (get_bits1(gb)) { // sps_extension_flag
  910. int sps_extension_flag[1];
  911. for (i = 0; i < 1; i++)
  912. sps_extension_flag[i] = get_bits1(gb);
  913. skip_bits(gb, 7); //sps_extension_7bits = get_bits(gb, 7);
  914. if (sps_extension_flag[0]) {
  915. int extended_precision_processing_flag;
  916. int high_precision_offsets_enabled_flag;
  917. int cabac_bypass_alignment_enabled_flag;
  918. sps->transform_skip_rotation_enabled_flag = get_bits1(gb);
  919. sps->transform_skip_context_enabled_flag = get_bits1(gb);
  920. sps->implicit_rdpcm_enabled_flag = get_bits1(gb);
  921. sps->explicit_rdpcm_enabled_flag = get_bits1(gb);
  922. extended_precision_processing_flag = get_bits1(gb);
  923. if (extended_precision_processing_flag)
  924. av_log(avctx, AV_LOG_WARNING,
  925. "extended_precision_processing_flag not yet implemented\n");
  926. sps->intra_smoothing_disabled_flag = get_bits1(gb);
  927. high_precision_offsets_enabled_flag = get_bits1(gb);
  928. if (high_precision_offsets_enabled_flag)
  929. av_log(avctx, AV_LOG_WARNING,
  930. "high_precision_offsets_enabled_flag not yet implemented\n");
  931. sps->persistent_rice_adaptation_enabled_flag = get_bits1(gb);
  932. cabac_bypass_alignment_enabled_flag = get_bits1(gb);
  933. if (cabac_bypass_alignment_enabled_flag)
  934. av_log(avctx, AV_LOG_WARNING,
  935. "cabac_bypass_alignment_enabled_flag not yet implemented\n");
  936. }
  937. }
  938. if (apply_defdispwin) {
  939. sps->output_window.left_offset += sps->vui.def_disp_win.left_offset;
  940. sps->output_window.right_offset += sps->vui.def_disp_win.right_offset;
  941. sps->output_window.top_offset += sps->vui.def_disp_win.top_offset;
  942. sps->output_window.bottom_offset += sps->vui.def_disp_win.bottom_offset;
  943. }
  944. if (sps->output_window.left_offset & (0x1F >> (sps->pixel_shift)) &&
  945. !(avctx->flags & AV_CODEC_FLAG_UNALIGNED)) {
  946. sps->output_window.left_offset &= ~(0x1F >> (sps->pixel_shift));
  947. av_log(avctx, AV_LOG_WARNING, "Reducing left output window to %d "
  948. "chroma samples to preserve alignment.\n",
  949. sps->output_window.left_offset);
  950. }
  951. sps->output_width = sps->width -
  952. (sps->output_window.left_offset + sps->output_window.right_offset);
  953. sps->output_height = sps->height -
  954. (sps->output_window.top_offset + sps->output_window.bottom_offset);
  955. if (sps->width <= sps->output_window.left_offset + (int64_t)sps->output_window.right_offset ||
  956. sps->height <= sps->output_window.top_offset + (int64_t)sps->output_window.bottom_offset) {
  957. av_log(avctx, AV_LOG_WARNING, "Invalid visible frame dimensions: %dx%d.\n",
  958. sps->output_width, sps->output_height);
  959. if (avctx->err_recognition & AV_EF_EXPLODE) {
  960. return AVERROR_INVALIDDATA;
  961. }
  962. av_log(avctx, AV_LOG_WARNING,
  963. "Displaying the whole video surface.\n");
  964. memset(&sps->pic_conf_win, 0, sizeof(sps->pic_conf_win));
  965. memset(&sps->output_window, 0, sizeof(sps->output_window));
  966. sps->output_width = sps->width;
  967. sps->output_height = sps->height;
  968. }
  969. // Inferred parameters
  970. sps->log2_ctb_size = sps->log2_min_cb_size +
  971. sps->log2_diff_max_min_coding_block_size;
  972. sps->log2_min_pu_size = sps->log2_min_cb_size - 1;
  973. if (sps->log2_ctb_size > MAX_LOG2_CTB_SIZE) {
  974. av_log(avctx, AV_LOG_ERROR, "CTB size out of range: 2^%d\n", sps->log2_ctb_size);
  975. return AVERROR_INVALIDDATA;
  976. }
  977. if (sps->log2_ctb_size < 4) {
  978. av_log(avctx,
  979. AV_LOG_ERROR,
  980. "log2_ctb_size %d differs from the bounds of any known profile\n",
  981. sps->log2_ctb_size);
  982. avpriv_request_sample(avctx, "log2_ctb_size %d", sps->log2_ctb_size);
  983. return AVERROR_INVALIDDATA;
  984. }
  985. sps->ctb_width = (sps->width + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
  986. sps->ctb_height = (sps->height + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
  987. sps->ctb_size = sps->ctb_width * sps->ctb_height;
  988. sps->min_cb_width = sps->width >> sps->log2_min_cb_size;
  989. sps->min_cb_height = sps->height >> sps->log2_min_cb_size;
  990. sps->min_tb_width = sps->width >> sps->log2_min_tb_size;
  991. sps->min_tb_height = sps->height >> sps->log2_min_tb_size;
  992. sps->min_pu_width = sps->width >> sps->log2_min_pu_size;
  993. sps->min_pu_height = sps->height >> sps->log2_min_pu_size;
  994. sps->tb_mask = (1 << (sps->log2_ctb_size - sps->log2_min_tb_size)) - 1;
  995. sps->qp_bd_offset = 6 * (sps->bit_depth - 8);
  996. if (av_mod_uintp2(sps->width, sps->log2_min_cb_size) ||
  997. av_mod_uintp2(sps->height, sps->log2_min_cb_size)) {
  998. av_log(avctx, AV_LOG_ERROR, "Invalid coded frame dimensions.\n");
  999. return AVERROR_INVALIDDATA;
  1000. }
  1001. if (sps->max_transform_hierarchy_depth_inter > sps->log2_ctb_size - sps->log2_min_tb_size) {
  1002. av_log(avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_inter out of range: %d\n",
  1003. sps->max_transform_hierarchy_depth_inter);
  1004. return AVERROR_INVALIDDATA;
  1005. }
  1006. if (sps->max_transform_hierarchy_depth_intra > sps->log2_ctb_size - sps->log2_min_tb_size) {
  1007. av_log(avctx, AV_LOG_ERROR, "max_transform_hierarchy_depth_intra out of range: %d\n",
  1008. sps->max_transform_hierarchy_depth_intra);
  1009. return AVERROR_INVALIDDATA;
  1010. }
  1011. if (sps->log2_max_trafo_size > FFMIN(sps->log2_ctb_size, 5)) {
  1012. av_log(avctx, AV_LOG_ERROR,
  1013. "max transform block size out of range: %d\n",
  1014. sps->log2_max_trafo_size);
  1015. return AVERROR_INVALIDDATA;
  1016. }
  1017. if (get_bits_left(gb) < 0) {
  1018. av_log(avctx, AV_LOG_ERROR,
  1019. "Overread SPS by %d bits\n", -get_bits_left(gb));
  1020. return AVERROR_INVALIDDATA;
  1021. }
  1022. return 0;
  1023. }
  1024. int ff_hevc_decode_nal_sps(GetBitContext *gb, AVCodecContext *avctx,
  1025. HEVCParamSets *ps, int apply_defdispwin)
  1026. {
  1027. HEVCSPS *sps;
  1028. AVBufferRef *sps_buf = av_buffer_allocz(sizeof(*sps));
  1029. unsigned int sps_id;
  1030. int ret;
  1031. if (!sps_buf)
  1032. return AVERROR(ENOMEM);
  1033. sps = (HEVCSPS*)sps_buf->data;
  1034. av_log(avctx, AV_LOG_DEBUG, "Decoding SPS\n");
  1035. ret = ff_hevc_parse_sps(sps, gb, &sps_id,
  1036. apply_defdispwin,
  1037. ps->vps_list, avctx);
  1038. if (ret < 0) {
  1039. av_buffer_unref(&sps_buf);
  1040. return ret;
  1041. }
  1042. if (avctx->debug & FF_DEBUG_BITSTREAM) {
  1043. av_log(avctx, AV_LOG_DEBUG,
  1044. "Parsed SPS: id %d; coded wxh: %dx%d; "
  1045. "cropped wxh: %dx%d; pix_fmt: %s.\n",
  1046. sps_id, sps->width, sps->height,
  1047. sps->output_width, sps->output_height,
  1048. av_get_pix_fmt_name(sps->pix_fmt));
  1049. }
  1050. /* check if this is a repeat of an already parsed SPS, then keep the
  1051. * original one.
  1052. * otherwise drop all PPSes that depend on it */
  1053. if (ps->sps_list[sps_id] &&
  1054. !memcmp(ps->sps_list[sps_id]->data, sps_buf->data, sps_buf->size)) {
  1055. av_buffer_unref(&sps_buf);
  1056. } else {
  1057. remove_sps(ps, sps_id);
  1058. ps->sps_list[sps_id] = sps_buf;
  1059. }
  1060. return 0;
  1061. }
  1062. static void hevc_pps_free(void *opaque, uint8_t *data)
  1063. {
  1064. HEVCPPS *pps = (HEVCPPS*)data;
  1065. av_freep(&pps->column_width);
  1066. av_freep(&pps->row_height);
  1067. av_freep(&pps->col_bd);
  1068. av_freep(&pps->row_bd);
  1069. av_freep(&pps->col_idxX);
  1070. av_freep(&pps->ctb_addr_rs_to_ts);
  1071. av_freep(&pps->ctb_addr_ts_to_rs);
  1072. av_freep(&pps->tile_pos_rs);
  1073. av_freep(&pps->tile_id);
  1074. av_freep(&pps->min_tb_addr_zs_tab);
  1075. av_freep(&pps);
  1076. }
  1077. static int pps_range_extensions(GetBitContext *gb, AVCodecContext *avctx,
  1078. HEVCPPS *pps, HEVCSPS *sps) {
  1079. int i;
  1080. if (pps->transform_skip_enabled_flag) {
  1081. pps->log2_max_transform_skip_block_size = get_ue_golomb_long(gb) + 2;
  1082. }
  1083. pps->cross_component_prediction_enabled_flag = get_bits1(gb);
  1084. pps->chroma_qp_offset_list_enabled_flag = get_bits1(gb);
  1085. if (pps->chroma_qp_offset_list_enabled_flag) {
  1086. pps->diff_cu_chroma_qp_offset_depth = get_ue_golomb_long(gb);
  1087. pps->chroma_qp_offset_list_len_minus1 = get_ue_golomb_long(gb);
  1088. if (pps->chroma_qp_offset_list_len_minus1 && pps->chroma_qp_offset_list_len_minus1 >= 5) {
  1089. av_log(avctx, AV_LOG_ERROR,
  1090. "chroma_qp_offset_list_len_minus1 shall be in the range [0, 5].\n");
  1091. return AVERROR_INVALIDDATA;
  1092. }
  1093. for (i = 0; i <= pps->chroma_qp_offset_list_len_minus1; i++) {
  1094. pps->cb_qp_offset_list[i] = get_se_golomb_long(gb);
  1095. if (pps->cb_qp_offset_list[i]) {
  1096. av_log(avctx, AV_LOG_WARNING,
  1097. "cb_qp_offset_list not tested yet.\n");
  1098. }
  1099. pps->cr_qp_offset_list[i] = get_se_golomb_long(gb);
  1100. if (pps->cr_qp_offset_list[i]) {
  1101. av_log(avctx, AV_LOG_WARNING,
  1102. "cb_qp_offset_list not tested yet.\n");
  1103. }
  1104. }
  1105. }
  1106. pps->log2_sao_offset_scale_luma = get_ue_golomb_long(gb);
  1107. pps->log2_sao_offset_scale_chroma = get_ue_golomb_long(gb);
  1108. return(0);
  1109. }
/* Derive all PPS-dependent lookup tables (tile boundaries, raster-scan to
 * tile-scan CTB address maps, tile ids, and the z-scan min-TB address table)
 * per H.265 section 6.5. All arrays are allocated here and released by
 * hevc_pps_free(). Returns 0 or AVERROR(ENOMEM). */
static inline int setup_pps(AVCodecContext *avctx, GetBitContext *gb,
                            HEVCPPS *pps, HEVCSPS *sps)
{
    int log2_diff;
    int pic_area_in_ctbs;
    int i, j, x, y, ctb_addr_rs, tile_id;

    // Inferred parameters
    pps->col_bd   = av_malloc_array(pps->num_tile_columns + 1, sizeof(*pps->col_bd));
    pps->row_bd   = av_malloc_array(pps->num_tile_rows + 1,    sizeof(*pps->row_bd));
    pps->col_idxX = av_malloc_array(sps->ctb_width,            sizeof(*pps->col_idxX));
    if (!pps->col_bd || !pps->row_bd || !pps->col_idxX)
        return AVERROR(ENOMEM);

    if (pps->uniform_spacing_flag) {
        /* Non-uniform spacing allocates these in ff_hevc_decode_nal_pps();
         * for uniform spacing we compute evenly-divided widths/heights here. */
        if (!pps->column_width) {
            pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
            pps->row_height   = av_malloc_array(pps->num_tile_rows,    sizeof(*pps->row_height));
        }
        if (!pps->column_width || !pps->row_height)
            return AVERROR(ENOMEM);

        for (i = 0; i < pps->num_tile_columns; i++) {
            pps->column_width[i] = ((i + 1) * sps->ctb_width) / pps->num_tile_columns -
                                   (i * sps->ctb_width) / pps->num_tile_columns;
        }

        for (i = 0; i < pps->num_tile_rows; i++) {
            pps->row_height[i] = ((i + 1) * sps->ctb_height) / pps->num_tile_rows -
                                 (i * sps->ctb_height) / pps->num_tile_rows;
        }
    }

    /* Prefix sums of tile widths/heights: tile i spans CTB columns
     * [col_bd[i], col_bd[i+1]) and rows [row_bd[i], row_bd[i+1]). */
    pps->col_bd[0] = 0;
    for (i = 0; i < pps->num_tile_columns; i++)
        pps->col_bd[i + 1] = pps->col_bd[i] + pps->column_width[i];

    pps->row_bd[0] = 0;
    for (i = 0; i < pps->num_tile_rows; i++)
        pps->row_bd[i + 1] = pps->row_bd[i] + pps->row_height[i];

    /* col_idxX[i]: tile-column index of CTB column i. */
    for (i = 0, j = 0; i < sps->ctb_width; i++) {
        if (i > pps->col_bd[j])
            j++;
        pps->col_idxX[i] = j;
    }

    /**
     * 6.5
     */
    pic_area_in_ctbs = sps->ctb_width * sps->ctb_height;

    pps->ctb_addr_rs_to_ts = av_malloc_array(pic_area_in_ctbs, sizeof(*pps->ctb_addr_rs_to_ts));
    pps->ctb_addr_ts_to_rs = av_malloc_array(pic_area_in_ctbs, sizeof(*pps->ctb_addr_ts_to_rs));
    pps->tile_id           = av_malloc_array(pic_area_in_ctbs, sizeof(*pps->tile_id));
    pps->min_tb_addr_zs_tab = av_malloc_array((sps->tb_mask+2) * (sps->tb_mask+2), sizeof(*pps->min_tb_addr_zs_tab));
    if (!pps->ctb_addr_rs_to_ts || !pps->ctb_addr_ts_to_rs ||
        !pps->tile_id || !pps->min_tb_addr_zs_tab) {
        return AVERROR(ENOMEM);
    }

    /* Build the raster-scan <-> tile-scan CTB address maps (spec 6.5.1):
     * for each raster address, find its tile, then count the CTBs of all
     * complete tiles preceding it plus its offset within its own tile. */
    for (ctb_addr_rs = 0; ctb_addr_rs < pic_area_in_ctbs; ctb_addr_rs++) {
        int tb_x   = ctb_addr_rs % sps->ctb_width;
        int tb_y   = ctb_addr_rs / sps->ctb_width;
        int tile_x = 0;
        int tile_y = 0;
        int val    = 0;

        for (i = 0; i < pps->num_tile_columns; i++) {
            if (tb_x < pps->col_bd[i + 1]) {
                tile_x = i;
                break;
            }
        }

        for (i = 0; i < pps->num_tile_rows; i++) {
            if (tb_y < pps->row_bd[i + 1]) {
                tile_y = i;
                break;
            }
        }

        /* CTBs in tiles to the left within the same tile row ... */
        for (i = 0; i < tile_x; i++)
            val += pps->row_height[tile_y] * pps->column_width[i];
        /* ... plus all CTBs in tile rows above ... */
        for (i = 0; i < tile_y; i++)
            val += sps->ctb_width * pps->row_height[i];

        /* ... plus the raster offset inside this tile. */
        val += (tb_y - pps->row_bd[tile_y]) * pps->column_width[tile_x] +
               tb_x - pps->col_bd[tile_x];

        pps->ctb_addr_rs_to_ts[ctb_addr_rs] = val;
        pps->ctb_addr_ts_to_rs[val]         = ctb_addr_rs;
    }

    /* tile_id[ts]: tile index (in raster tile order) of each tile-scan CTB. */
    for (j = 0, tile_id = 0; j < pps->num_tile_rows; j++)
        for (i = 0; i < pps->num_tile_columns; i++, tile_id++)
            for (y = pps->row_bd[j]; y < pps->row_bd[j + 1]; y++)
                for (x = pps->col_bd[i]; x < pps->col_bd[i + 1]; x++)
                    pps->tile_id[pps->ctb_addr_rs_to_ts[y * sps->ctb_width + x]] = tile_id;

    /* Raster address of each tile's top-left CTB; tile_id now equals the
     * total tile count after the loops above. */
    pps->tile_pos_rs = av_malloc_array(tile_id, sizeof(*pps->tile_pos_rs));
    if (!pps->tile_pos_rs)
        return AVERROR(ENOMEM);

    for (j = 0; j < pps->num_tile_rows; j++)
        for (i = 0; i < pps->num_tile_columns; i++)
            pps->tile_pos_rs[j * pps->num_tile_columns + i] =
                pps->row_bd[j] * sps->ctb_width + pps->col_bd[i];

    /* min_tb_addr_zs: z-scan order address of each minimum transform block.
     * The table has a 1-entry guard border (initialized to -1) on the top
     * and left so neighbour lookups at picture edges stay in bounds. */
    log2_diff = sps->log2_ctb_size - sps->log2_min_tb_size;
    pps->min_tb_addr_zs = &pps->min_tb_addr_zs_tab[1*(sps->tb_mask+2)+1];
    for (y = 0; y < sps->tb_mask+2; y++) {
        pps->min_tb_addr_zs_tab[y*(sps->tb_mask+2)] = -1;
        pps->min_tb_addr_zs_tab[y]                  = -1;
    }
    for (y = 0; y < sps->tb_mask+1; y++) {
        for (x = 0; x < sps->tb_mask+1; x++) {
            int tb_x = x >> log2_diff;
            int tb_y = y >> log2_diff;
            int rs   = sps->ctb_width * tb_y + tb_x;
            int val  = pps->ctb_addr_rs_to_ts[rs] << (log2_diff * 2);
            /* Interleave the low bits of x and y (Morton order) to get the
             * z-scan offset within the CTB. */
            for (i = 0; i < log2_diff; i++) {
                int m = 1 << i;
                val += (m & x ? m * m : 0) + (m & y ? 2 * m * m : 0);
            }
            pps->min_tb_addr_zs[y * (sps->tb_mask+2) + x] = val;
        }
    }

    return 0;
}
  1221. int ff_hevc_decode_nal_pps(GetBitContext *gb, AVCodecContext *avctx,
  1222. HEVCParamSets *ps)
  1223. {
  1224. HEVCSPS *sps = NULL;
  1225. int i, ret = 0;
  1226. unsigned int pps_id = 0;
  1227. AVBufferRef *pps_buf;
  1228. HEVCPPS *pps = av_mallocz(sizeof(*pps));
  1229. if (!pps)
  1230. return AVERROR(ENOMEM);
  1231. pps_buf = av_buffer_create((uint8_t *)pps, sizeof(*pps),
  1232. hevc_pps_free, NULL, 0);
  1233. if (!pps_buf) {
  1234. av_freep(&pps);
  1235. return AVERROR(ENOMEM);
  1236. }
  1237. av_log(avctx, AV_LOG_DEBUG, "Decoding PPS\n");
  1238. // Default values
  1239. pps->loop_filter_across_tiles_enabled_flag = 1;
  1240. pps->num_tile_columns = 1;
  1241. pps->num_tile_rows = 1;
  1242. pps->uniform_spacing_flag = 1;
  1243. pps->disable_dbf = 0;
  1244. pps->beta_offset = 0;
  1245. pps->tc_offset = 0;
  1246. pps->log2_max_transform_skip_block_size = 2;
  1247. // Coded parameters
  1248. pps_id = get_ue_golomb_long(gb);
  1249. if (pps_id >= MAX_PPS_COUNT) {
  1250. av_log(avctx, AV_LOG_ERROR, "PPS id out of range: %d\n", pps_id);
  1251. ret = AVERROR_INVALIDDATA;
  1252. goto err;
  1253. }
  1254. pps->sps_id = get_ue_golomb_long(gb);
  1255. if (pps->sps_id >= MAX_SPS_COUNT) {
  1256. av_log(avctx, AV_LOG_ERROR, "SPS id out of range: %d\n", pps->sps_id);
  1257. ret = AVERROR_INVALIDDATA;
  1258. goto err;
  1259. }
  1260. if (!ps->sps_list[pps->sps_id]) {
  1261. av_log(avctx, AV_LOG_ERROR, "SPS %u does not exist.\n", pps->sps_id);
  1262. ret = AVERROR_INVALIDDATA;
  1263. goto err;
  1264. }
  1265. sps = (HEVCSPS *)ps->sps_list[pps->sps_id]->data;
  1266. pps->dependent_slice_segments_enabled_flag = get_bits1(gb);
  1267. pps->output_flag_present_flag = get_bits1(gb);
  1268. pps->num_extra_slice_header_bits = get_bits(gb, 3);
  1269. pps->sign_data_hiding_flag = get_bits1(gb);
  1270. pps->cabac_init_present_flag = get_bits1(gb);
  1271. pps->num_ref_idx_l0_default_active = get_ue_golomb_long(gb) + 1;
  1272. pps->num_ref_idx_l1_default_active = get_ue_golomb_long(gb) + 1;
  1273. pps->pic_init_qp_minus26 = get_se_golomb(gb);
  1274. pps->constrained_intra_pred_flag = get_bits1(gb);
  1275. pps->transform_skip_enabled_flag = get_bits1(gb);
  1276. pps->cu_qp_delta_enabled_flag = get_bits1(gb);
  1277. pps->diff_cu_qp_delta_depth = 0;
  1278. if (pps->cu_qp_delta_enabled_flag)
  1279. pps->diff_cu_qp_delta_depth = get_ue_golomb_long(gb);
  1280. if (pps->diff_cu_qp_delta_depth < 0 ||
  1281. pps->diff_cu_qp_delta_depth > sps->log2_diff_max_min_coding_block_size) {
  1282. av_log(avctx, AV_LOG_ERROR, "diff_cu_qp_delta_depth %d is invalid\n",
  1283. pps->diff_cu_qp_delta_depth);
  1284. ret = AVERROR_INVALIDDATA;
  1285. goto err;
  1286. }
  1287. pps->cb_qp_offset = get_se_golomb(gb);
  1288. if (pps->cb_qp_offset < -12 || pps->cb_qp_offset > 12) {
  1289. av_log(avctx, AV_LOG_ERROR, "pps_cb_qp_offset out of range: %d\n",
  1290. pps->cb_qp_offset);
  1291. ret = AVERROR_INVALIDDATA;
  1292. goto err;
  1293. }
  1294. pps->cr_qp_offset = get_se_golomb(gb);
  1295. if (pps->cr_qp_offset < -12 || pps->cr_qp_offset > 12) {
  1296. av_log(avctx, AV_LOG_ERROR, "pps_cr_qp_offset out of range: %d\n",
  1297. pps->cr_qp_offset);
  1298. ret = AVERROR_INVALIDDATA;
  1299. goto err;
  1300. }
  1301. pps->pic_slice_level_chroma_qp_offsets_present_flag = get_bits1(gb);
  1302. pps->weighted_pred_flag = get_bits1(gb);
  1303. pps->weighted_bipred_flag = get_bits1(gb);
  1304. pps->transquant_bypass_enable_flag = get_bits1(gb);
  1305. pps->tiles_enabled_flag = get_bits1(gb);
  1306. pps->entropy_coding_sync_enabled_flag = get_bits1(gb);
  1307. if (pps->tiles_enabled_flag) {
  1308. pps->num_tile_columns = get_ue_golomb_long(gb) + 1;
  1309. pps->num_tile_rows = get_ue_golomb_long(gb) + 1;
  1310. if (pps->num_tile_columns <= 0 ||
  1311. pps->num_tile_columns >= sps->width) {
  1312. av_log(avctx, AV_LOG_ERROR, "num_tile_columns_minus1 out of range: %d\n",
  1313. pps->num_tile_columns - 1);
  1314. ret = AVERROR_INVALIDDATA;
  1315. goto err;
  1316. }
  1317. if (pps->num_tile_rows <= 0 ||
  1318. pps->num_tile_rows >= sps->height) {
  1319. av_log(avctx, AV_LOG_ERROR, "num_tile_rows_minus1 out of range: %d\n",
  1320. pps->num_tile_rows - 1);
  1321. ret = AVERROR_INVALIDDATA;
  1322. goto err;
  1323. }
  1324. pps->column_width = av_malloc_array(pps->num_tile_columns, sizeof(*pps->column_width));
  1325. pps->row_height = av_malloc_array(pps->num_tile_rows, sizeof(*pps->row_height));
  1326. if (!pps->column_width || !pps->row_height) {
  1327. ret = AVERROR(ENOMEM);
  1328. goto err;
  1329. }
  1330. pps->uniform_spacing_flag = get_bits1(gb);
  1331. if (!pps->uniform_spacing_flag) {
  1332. uint64_t sum = 0;
  1333. for (i = 0; i < pps->num_tile_columns - 1; i++) {
  1334. pps->column_width[i] = get_ue_golomb_long(gb) + 1;
  1335. sum += pps->column_width[i];
  1336. }
  1337. if (sum >= sps->ctb_width) {
  1338. av_log(avctx, AV_LOG_ERROR, "Invalid tile widths.\n");
  1339. ret = AVERROR_INVALIDDATA;
  1340. goto err;
  1341. }
  1342. pps->column_width[pps->num_tile_columns - 1] = sps->ctb_width - sum;
  1343. sum = 0;
  1344. for (i = 0; i < pps->num_tile_rows - 1; i++) {
  1345. pps->row_height[i] = get_ue_golomb_long(gb) + 1;
  1346. sum += pps->row_height[i];
  1347. }
  1348. if (sum >= sps->ctb_height) {
  1349. av_log(avctx, AV_LOG_ERROR, "Invalid tile heights.\n");
  1350. ret = AVERROR_INVALIDDATA;
  1351. goto err;
  1352. }
  1353. pps->row_height[pps->num_tile_rows - 1] = sps->ctb_height - sum;
  1354. }
  1355. pps->loop_filter_across_tiles_enabled_flag = get_bits1(gb);
  1356. }
  1357. pps->seq_loop_filter_across_slices_enabled_flag = get_bits1(gb);
  1358. pps->deblocking_filter_control_present_flag = get_bits1(gb);
  1359. if (pps->deblocking_filter_control_present_flag) {
  1360. pps->deblocking_filter_override_enabled_flag = get_bits1(gb);
  1361. pps->disable_dbf = get_bits1(gb);
  1362. if (!pps->disable_dbf) {
  1363. pps->beta_offset = get_se_golomb(gb) * 2;
  1364. pps->tc_offset = get_se_golomb(gb) * 2;
  1365. if (pps->beta_offset/2 < -6 || pps->beta_offset/2 > 6) {
  1366. av_log(avctx, AV_LOG_ERROR, "pps_beta_offset_div2 out of range: %d\n",
  1367. pps->beta_offset/2);
  1368. ret = AVERROR_INVALIDDATA;
  1369. goto err;
  1370. }
  1371. if (pps->tc_offset/2 < -6 || pps->tc_offset/2 > 6) {
  1372. av_log(avctx, AV_LOG_ERROR, "pps_tc_offset_div2 out of range: %d\n",
  1373. pps->tc_offset/2);
  1374. ret = AVERROR_INVALIDDATA;
  1375. goto err;
  1376. }
  1377. }
  1378. }
  1379. pps->scaling_list_data_present_flag = get_bits1(gb);
  1380. if (pps->scaling_list_data_present_flag) {
  1381. set_default_scaling_list_data(&pps->scaling_list);
  1382. ret = scaling_list_data(gb, avctx, &pps->scaling_list, sps);
  1383. if (ret < 0)
  1384. goto err;
  1385. }
  1386. pps->lists_modification_present_flag = get_bits1(gb);
  1387. pps->log2_parallel_merge_level = get_ue_golomb_long(gb) + 2;
  1388. if (pps->log2_parallel_merge_level > sps->log2_ctb_size) {
  1389. av_log(avctx, AV_LOG_ERROR, "log2_parallel_merge_level_minus2 out of range: %d\n",
  1390. pps->log2_parallel_merge_level - 2);
  1391. ret = AVERROR_INVALIDDATA;
  1392. goto err;
  1393. }
  1394. pps->slice_header_extension_present_flag = get_bits1(gb);
  1395. if (get_bits1(gb)) { // pps_extension_present_flag
  1396. int pps_range_extensions_flag = get_bits1(gb);
  1397. /* int pps_extension_7bits = */ get_bits(gb, 7);
  1398. if (sps->ptl.general_ptl.profile_idc == FF_PROFILE_HEVC_REXT && pps_range_extensions_flag) {
  1399. if ((ret = pps_range_extensions(gb, avctx, pps, sps)) < 0)
  1400. goto err;
  1401. }
  1402. }
  1403. ret = setup_pps(avctx, gb, pps, sps);
  1404. if (ret < 0)
  1405. goto err;
  1406. if (get_bits_left(gb) < 0) {
  1407. av_log(avctx, AV_LOG_ERROR,
  1408. "Overread PPS by %d bits\n", -get_bits_left(gb));
  1409. goto err;
  1410. }
  1411. remove_pps(ps, pps_id);
  1412. ps->pps_list[pps_id] = pps_buf;
  1413. return 0;
  1414. err:
  1415. av_buffer_unref(&pps_buf);
  1416. return ret;
  1417. }