/*
 * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Cineform HD video decoder
 */

#include "libavutil/attributes.h"
#include "libavutil/buffer.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bitstream.h"
#include "bytestream.h"
#include "internal.h"
#include "thread.h"
#include "cfhd.h"

enum CFHDParam {
    ChannelCount     = 12,
    SubbandCount     = 14,
    ImageWidth       = 20,
    ImageHeight      = 21,
    LowpassPrecision = 35,
    SubbandNumber    = 48,
    Quantization     = 53,
    ChannelNumber    = 62,
    BitsPerComponent = 101,
    ChannelWidth     = 104,
    ChannelHeight    = 105,
    PrescaleShift    = 109,
};

static av_cold int cfhd_init(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    memset(s, 0, sizeof(*s));

    s->avctx                   = avctx;
    avctx->bits_per_raw_sample = 10;

    return ff_cfhd_init_vlcs(s);
}

static void init_plane_defaults(CFHDContext *s)
{
    s->subband_num        = 0;
    s->level              = 0;
    s->subband_num_actual = 0;
}

static void init_frame_defaults(CFHDContext *s)
{
    s->coded_format      = AV_PIX_FMT_YUV422P10;
    s->coded_width       = 0;
    s->coded_height      = 0;
    s->cropped_height    = 0;
    s->bpc               = 10;
    s->channel_cnt       = 4;
    s->subband_cnt       = SUBBAND_COUNT;
    s->channel_num       = 0;
    s->lowpass_precision = 16;
    s->quantisation      = 1;
    s->prescale_shift[0] = 0;
    s->prescale_shift[1] = 0;
    s->prescale_shift[2] = 0;
    s->wavelet_depth     = 3;
    s->pshift            = 1;
    s->codebook          = 0;
    init_plane_defaults(s);
}
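
/*
 * Highpass coefficients are stored companded; dequant_and_decompand()
 * undoes this by expanding each magnitude with a cubic curve,
 * |x| + 768 * |x|^3 / 255^3, and then restoring the sign and scaling by
 * the per-subband quantisation factor.
 */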

/* TODO: merge with VLC tables or use LUT */
static inline int dequant_and_decompand(int level, int quantisation)
{
    int64_t abslevel = abs(level);
    return (abslevel + ((768 * abslevel * abslevel * abslevel) / (255 * 255 * 255))) *
           FFSIGN(level) * quantisation;
}
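
/*
 * One step of the inverse wavelet transform: combines a low band and a
 * high band into an output line of twice the length, writing one even and
 * one odd sample per input position.  Interior samples use a (1, 0, -1)/8
 * prediction from the neighbouring low-band values; the first and last
 * positions use dedicated boundary taps.  This is consistent with the
 * 2/6-style wavelet CineForm is understood to use.  When clip is non-zero
 * the results are clamped to that many bits.
 */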

static inline void filter(int16_t *output, ptrdiff_t out_stride,
                          int16_t *low, ptrdiff_t low_stride,
                          int16_t *high, ptrdiff_t high_stride,
                          int len, int clip)
{
    int16_t tmp;
    int i;

    for (i = 0; i < len; i++) {
        if (i == 0) {
            tmp = (11 * low[0 * low_stride] - 4 * low[1 * low_stride] + low[2 * low_stride] + 4) >> 3;
            output[(2 * i + 0) * out_stride] = (tmp + high[0 * high_stride]) >> 1;
        } else if (i == len - 1) {
            tmp = (5 * low[i * low_stride] + 4 * low[(i - 1) * low_stride] - low[(i - 2) * low_stride] + 4) >> 3;
            output[(2 * i + 0) * out_stride] = (tmp + high[i * high_stride]) >> 1;
        } else {
            tmp = (low[(i - 1) * low_stride] - low[(i + 1) * low_stride] + 4) >> 3;
            output[(2 * i + 0) * out_stride] = (tmp + low[i * low_stride] + high[i * high_stride]) >> 1;
        }
        if (clip)
            output[(2 * i + 0) * out_stride] = av_clip_uintp2_c(output[(2 * i + 0) * out_stride], clip);

        if (i == 0) {
            tmp = (5 * low[0 * low_stride] + 4 * low[1 * low_stride] - low[2 * low_stride] + 4) >> 3;
            output[(2 * i + 1) * out_stride] = (tmp - high[0 * high_stride]) >> 1;
        } else if (i == len - 1) {
            tmp = (11 * low[i * low_stride] - 4 * low[(i - 1) * low_stride] + low[(i - 2) * low_stride] + 4) >> 3;
            output[(2 * i + 1) * out_stride] = (tmp - high[i * high_stride]) >> 1;
        } else {
            tmp = (low[(i + 1) * low_stride] - low[(i - 1) * low_stride] + 4) >> 3;
            output[(2 * i + 1) * out_stride] = (tmp + low[i * low_stride] - high[i * high_stride]) >> 1;
        }
        if (clip)
            output[(2 * i + 1) * out_stride] = av_clip_uintp2_c(output[(2 * i + 1) * out_stride], clip);
    }
}

static void horiz_filter(int16_t *output, int16_t *low, int16_t *high,
                         int width)
{
    filter(output, 1, low, 1, high, 1, width, 0);
}

static void horiz_filter_clip(int16_t *output, int16_t *low, int16_t *high,
                              int width, int clip)
{
    filter(output, 1, low, 1, high, 1, width, clip);
}

static void vert_filter(int16_t *output, ptrdiff_t out_stride,
                        int16_t *low, ptrdiff_t low_stride,
                        int16_t *high, ptrdiff_t high_stride, int len)
{
    filter(output, out_stride, low, low_stride, high, high_stride, len, 0);
}

static void free_buffers(CFHDContext *s)
{
    unsigned i;

    for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
        av_freep(&s->plane[i].idwt_buf);
        av_freep(&s->plane[i].idwt_tmp);
    }
    s->a_height = 0;
    s->a_width  = 0;
}
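
/*
 * Per-plane buffers for the three-level inverse DWT.  idwt_buf holds the
 * ten subbands back to back: subband 0 is the smallest lowpass band
 * (w8 x h8), subbands 1-3 are the highpass bands at that size, subbands
 * 4-6 belong to the w4 x h4 level and subbands 7-9 to the w2 x h2 level.
 * idwt_tmp holds the intermediate low/high line buffers (l_h[]) used while
 * synthesising each level.
 */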

static int alloc_buffers(CFHDContext *s)
{
    int i, j, ret, planes;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        int width  = i ? s->coded_width  >> chroma_x_shift : s->coded_width;
        int height = i ? s->coded_height >> chroma_y_shift : s->coded_height;
        ptrdiff_t stride = FFALIGN(width / 8, 8) * 8;

        height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width  = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        w8 = FFALIGN(s->plane[i].width  / 8, 8);
        h8 = FFALIGN(s->plane[i].height / 8, 2);
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        s->plane[i].idwt_buf =
            av_malloc_array(height * stride, sizeof(*s->plane[i].idwt_buf));
        s->plane[i].idwt_tmp =
            av_malloc_array(height * stride, sizeof(*s->plane[i].idwt_tmp));
        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
        s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
        s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;

        for (j = 0; j < DWT_LEVELS; j++) {
            for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                s->plane[i].band[j][k].a_width  = w8 << j;
                s->plane[i].band[j][k].a_height = h8 << j;
            }
        }

        /* ll2 and ll1 commented out because they are done in-place */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
    }

    s->a_height = s->coded_height;
    s->a_width  = s->coded_width;
    s->a_format = s->coded_format;

    return 0;
}
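
/*
 * The CineForm bitstream is a sequence of 16-bit big-endian tag/value
 * pairs.  Range checks below use the absolute tag value; small chunks
 * (|tag| in 0x4000-0x40FF) carry a payload of value * 4 bytes, which is
 * skipped here, and a negative tag there is logged as "optional".  Tags
 * whose absolute high byte is 0x60-0x6F announce a 24-bit "large" chunk
 * length combining the tag's low byte with the value; it is only logged.
 * All other tags are handled in the switch statement below.
 */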

static int parse_tag(CFHDContext *s, GetByteContext *gb,
                     int16_t *tag_, uint16_t *value, int *planes)
{
    /* Bit weird but implement the tag parsing as the spec says */
    uint16_t tagu   = bytestream2_get_be16(gb);
    int16_t tag     = tagu;
    int8_t tag8     = tagu >> 8;
    uint16_t abstag = abs(tag);
    int8_t abs_tag8 = abs(tag8);
    uint16_t data   = bytestream2_get_be16(gb);

    *tag_  = tag;
    *value = data;

    if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6F) {
        av_log(s->avctx, AV_LOG_DEBUG, "large len %"PRIX16"\n",
               ((tagu & 0xFF) << 16) | data);
        return 0;
    } else if (abstag >= 0x4000 && abstag <= 0x40FF) {
        av_log(s->avctx, AV_LOG_DEBUG, "Small chunk length %"PRIu16" %s\n",
               data * 4, tag < 0 ? "optional" : "required");
        bytestream2_skipu(gb, data * 4);
        return 0;
    }

    switch (tag) {
    case 1:
        av_log(s->avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
        break;
    case 2:
    {
        int i;
        av_log(s->avctx, AV_LOG_DEBUG,
               "tag=2 header - skipping %"PRIu16" tag/value pairs\n", data);
        if (data > bytestream2_get_bytes_left(gb) / 4) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "Too many tag/value pairs (%"PRIu16")\n", data);
            return AVERROR_INVALIDDATA;
        }
        for (i = 0; i < data; i++) {
            uint16_t tag2 = bytestream2_get_be16(gb);
            uint16_t val2 = bytestream2_get_be16(gb);
            av_log(s->avctx, AV_LOG_DEBUG, "Tag/Value = %"PRIX16" %"PRIX16"\n",
                   tag2, val2);
        }
        break;
    }
    case 10:
        if (data != 0) {
            avpriv_report_missing_feature(s->avctx, "Transform type %"PRIu16, data);
            return AVERROR_PATCHWELCOME;
        }
        av_log(s->avctx, AV_LOG_DEBUG, "Transform-type? %"PRIu16"\n", data);
        break;
    case ChannelCount:
        av_log(s->avctx, AV_LOG_DEBUG, "Channel count: %"PRIu16"\n", data);
        if (data > 4) {
            avpriv_report_missing_feature(s->avctx, "Channel count %"PRIu16, data);
            return AVERROR_PATCHWELCOME;
        }
        s->channel_cnt = data;
        break;
    case SubbandCount:
        av_log(s->avctx, AV_LOG_DEBUG, "Subband count: %"PRIu16"\n", data);
        if (data != SUBBAND_COUNT) {
            avpriv_report_missing_feature(s->avctx, "Subband count %"PRIu16, data);
            return AVERROR_PATCHWELCOME;
        }
        break;
    case ImageWidth:
        av_log(s->avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
        s->coded_width = data;
        break;
    case ImageHeight:
        av_log(s->avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
        s->coded_height = data;
        break;
    case 23:
        avpriv_report_missing_feature(s->avctx, "Skip frame");
        return AVERROR_PATCHWELCOME;
    case 27:
        av_log(s->avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
        if (data < 2 || data > s->plane[s->channel_num].band[0][0].a_width) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
            return AVERROR_INVALIDDATA;
        }
        s->plane[s->channel_num].band[0][0].width  = data;
        s->plane[s->channel_num].band[0][0].stride = data;
        break;
    case 28:
        av_log(s->avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
        if (data < 2 || data > s->plane[s->channel_num].band[0][0].a_height) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
            return AVERROR_INVALIDDATA;
        }
        s->plane[s->channel_num].band[0][0].height = data;
        break;
    case LowpassPrecision:
        av_log(s->avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
        break;
    case 41:
    case 49:
        av_log(s->avctx, AV_LOG_DEBUG,
               "Highpass width%s %"PRIu16" channel %i level %i subband %i\n",
               tag == 49 ? "2" : "", data,
               s->channel_num, s->level, s->subband_num);
        if (data < 2) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid highpass width%s\n", tag == 49 ? "2" : "");
            return AVERROR_INVALIDDATA;
        }
        s->plane[s->channel_num].band[s->level][s->subband_num].width  = data;
        s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        break;
    case 42:
    case 50:
        av_log(s->avctx, AV_LOG_DEBUG, "Highpass height%s %"PRIu16"\n", tag == 50 ? "2" : "", data);
        if (data < 2) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid highpass height%s\n", tag == 50 ? "2" : "");
            return AVERROR_INVALIDDATA;
        }
        s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        break;
    case SubbandNumber:
        av_log(s->avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
        if (data > 3) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid subband number\n");
            return AVERROR_INVALIDDATA;
        }
        if (s->subband_num != 0 && data == 1) {
            if (s->level + 1 >= DWT_LEVELS) {
                av_log(s->avctx, AV_LOG_ERROR, "Invalid level\n");
                return AVERROR_INVALIDDATA;
            }
            s->level++;
        }
        s->subband_num = data;
        break;
    case 51:
        av_log(s->avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
        if (data >= SUBBAND_COUNT) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
            return AVERROR_INVALIDDATA;
        }
        s->subband_num_actual = data;
        break;
    case Quantization:
        s->quantisation = data;
        av_log(s->avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
        break;
    case ChannelNumber:
        av_log(s->avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
        if (data >= *planes) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid channel number\n");
            return AVERROR_INVALIDDATA;
        }
        s->channel_num = data;
        init_plane_defaults(s);
        break;
    case 70:
        av_log(s->avctx, AV_LOG_DEBUG,
               "Subsampling or bit-depth flag? %"PRIu16"\n", data);
        if (!(data == 10 || data == 12)) {
            av_log(s->avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
            return AVERROR_INVALIDDATA;
        }
        s->bpc = data;
        break;
    case 71:
        s->codebook = data;
        av_log(s->avctx, AV_LOG_DEBUG, "Codebook %i\n", s->codebook);
        break;
    case 72:
        s->codebook = data;
        av_log(s->avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
        break;
    case 84:
        av_log(s->avctx, AV_LOG_DEBUG, "Sample format? %"PRIu16"\n", data);
        switch (data) {
        case 1:
            s->coded_format = AV_PIX_FMT_YUV422P10;
            break;
        case 3:
            s->coded_format = AV_PIX_FMT_GBRP12;
            break;
        case 4:
            s->coded_format = AV_PIX_FMT_GBRAP12;
            break;
        default:
            avpriv_report_missing_feature(s->avctx, "Sample format %"PRIu16, data);
            return AVERROR_PATCHWELCOME;
        }
        *planes = av_pix_fmt_count_planes(s->coded_format);
        break;
    case -85:
        av_log(s->avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
        s->cropped_height = data;
        break;
    case BitsPerComponent:
        av_log(s->avctx, AV_LOG_DEBUG, "Bits per component: %"PRIu16"\n", data);
        s->bpc = data;
        break;
    case PrescaleShift:
        s->prescale_shift[0] = (data >> 0) & 0x7;
        s->prescale_shift[1] = (data >> 3) & 0x7;
        s->prescale_shift[2] = (data >> 6) & 0x7;
        av_log(s->avctx, AV_LOG_DEBUG, "Prescale shift (VC-5): %"PRIX16"\n", data);
        break;
    default:
        av_log(s->avctx, AV_LOG_DEBUG, "Unknown tag %"PRIu16" data %"PRIX16"\n",
               tag, data);
    }

    return 0;
}
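
/*
 * The lowpass band of a channel is stored uncompressed as big-endian
 * 16-bit coefficients, row by row, followed by padding to a 4-byte
 * boundary before tag parsing resumes.
 */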

static int read_lowpass_coeffs(CFHDContext *s, GetByteContext *gb,
                               int16_t *coeff_data)
{
    int i, j;
    int lowpass_height   = s->plane[s->channel_num].band[0][0].height;
    int lowpass_width    = s->plane[s->channel_num].band[0][0].width;
    int lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
    int lowpass_a_width  = s->plane[s->channel_num].band[0][0].a_width;

    if (lowpass_height > lowpass_a_height ||
        lowpass_width  > lowpass_a_width  ||
        lowpass_a_width * lowpass_a_height * sizeof(*coeff_data) > bytestream2_get_bytes_left(gb)) {
        av_log(s->avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
        return AVERROR_INVALIDDATA;
    }

    av_log(s->avctx, AV_LOG_DEBUG,
           "Start of lowpass coeffs component %d height:%d, width:%d\n",
           s->channel_num, lowpass_height, lowpass_width);
    for (i = 0; i < lowpass_height; i++) {
        for (j = 0; j < lowpass_width; j++)
            coeff_data[j] = bytestream2_get_be16u(gb);

        coeff_data += lowpass_width;
    }

    /* Align to mod-4 position to continue reading tags */
    bytestream2_seek(gb, bytestream2_tell(gb) & 3, SEEK_CUR);

    /* Duplicate the last coefficient line if the height is odd; coeff_data
     * already points one line past the data read above. */
    if (lowpass_height & 1)
        memcpy(coeff_data, coeff_data - lowpass_width,
               lowpass_width * sizeof(*coeff_data));

    av_log(s->avctx, AV_LOG_DEBUG, "Lowpass coefficients %i\n",
           lowpass_width * lowpass_height);

    return 0;
}
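
/*
 * Highpass subbands are run-length coded with one of two RL-VLC tables.
 * Each codeword yields a (level, run) pair, where the run is a number of
 * identical dequantised coefficients.  Decoding stops at the table's
 * escape codeword, and overrunning the expected coefficient count is
 * treated as corrupt data.
 */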

#define DECODE_SUBBAND_COEFFS(TABLE, COND)                                \
    while (1) {                                                           \
        int level, run, coeff;                                            \
        BITSTREAM_RL_VLC(level, run, &s->bc, s->TABLE, VLC_BITS, 3);      \
                                                                          \
        /* escape */                                                      \
        if (COND)                                                         \
            break;                                                        \
                                                                          \
        count += run;                                                     \
                                                                          \
        if (count > expected) {                                           \
            av_log(s->avctx, AV_LOG_ERROR, "Escape codeword not found, "  \
                   "probably corrupt data\n");                            \
            return AVERROR_INVALIDDATA;                                   \
        }                                                                 \
                                                                          \
        coeff = dequant_and_decompand(level, s->quantisation);            \
        for (i = 0; i < run; i++)                                         \
            *coeff_data++ = coeff;                                        \
    }

static int read_highpass_coeffs(CFHDContext *s, GetByteContext *gb,
                                int16_t *coeff_data)
{
    int i, ret;
    int highpass_height   = s->plane[s->channel_num].band[s->level][s->subband_num].height;
    int highpass_width    = s->plane[s->channel_num].band[s->level][s->subband_num].width;
    int highpass_a_width  = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
    int highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
    ptrdiff_t highpass_stride = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
    int expected   = highpass_height * highpass_stride;
    int a_expected = highpass_a_height * highpass_a_width;
    int count = 0;
    unsigned bytes;

    if (highpass_height > highpass_a_height ||
        highpass_width  > highpass_a_width  ||
        a_expected      < expected) {
        av_log(s->avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
        return AVERROR_INVALIDDATA;
    }

    av_log(s->avctx, AV_LOG_DEBUG,
           "Start subband coeffs plane %i level %i codebook %i expected %i\n",
           s->channel_num, s->level, s->codebook, expected);

    if ((ret = bitstream_init8(&s->bc, gb->buffer,
                               bytestream2_get_bytes_left(gb))) < 0)
        return ret;
    if (!s->codebook) {
        DECODE_SUBBAND_COEFFS(table_9_rl_vlc, level == 64)
    } else {
        DECODE_SUBBAND_COEFFS(table_18_rl_vlc, level == 255 && run == 2)
    }

    bytes = FFALIGN(AV_CEIL_RSHIFT(bitstream_tell(&s->bc), 3), 4);
    if (bytes > bytestream2_get_bytes_left(gb)) {
        av_log(s->avctx, AV_LOG_ERROR, "Bitstream overread error\n");
        return AVERROR_INVALIDDATA;
    } else
        bytestream2_seek(gb, bytes, SEEK_CUR);

    av_log(s->avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n",
           count, count - expected);
    s->codebook = 0;

    /* Duplicate the last coefficient line if the height is odd; coeff_data
     * has been advanced past the decoded coefficients above. */
    if (highpass_height & 1)
        memcpy(coeff_data, coeff_data - highpass_stride,
               highpass_stride * sizeof(*coeff_data));

    return 0;
}
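
/*
 * Rebuild one decomposition level of one plane: two vertical synthesis
 * passes combine the four subbands of this level into interleaved low/high
 * line buffers, and a horizontal pass then merges those into the next
 * larger lowpass band.  For the final level (3) the horizontal pass writes
 * directly into the output picture with clipping to the coded bit depth,
 * with planes 1 and 2 swapped to match the destination plane order.
 */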

static int reconstruct_level(CFHDContext *s, AVFrame *pic, int plane, int level)
{
    int i, j, idx = level - 1, idx2 = level > 1 ? 1 : 0;
    int16_t *low, *high, *output, *dst;
    int lowpass_height        = s->plane[plane].band[idx][idx2].height;
    int lowpass_width         = s->plane[plane].band[idx][idx2].width;
    ptrdiff_t highpass_stride = s->plane[plane].band[idx][1].stride;

    if (lowpass_height > s->plane[plane].band[idx][idx2].a_height ||
        lowpass_width  > s->plane[plane].band[idx][idx2].a_width  ||
        s->plane[plane].band[idx][1].width > s->plane[plane].band[idx][1].a_width ||
        !highpass_stride) {
        av_log(s->avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
        return AVERROR_INVALIDDATA;
    }

    av_log(s->avctx, AV_LOG_DEBUG, "Level %d plane %i %i %i %ti\n",
           level, plane, lowpass_height, lowpass_width, highpass_stride);

    low    = s->plane[plane].subband[0];
    high   = s->plane[plane].subband[2 + 3 * idx];
    output = s->plane[plane].l_h[3 * idx];
    for (i = 0; i < lowpass_width; i++) {
        vert_filter(output, lowpass_width, low, lowpass_width, high,
                    highpass_stride, lowpass_height);
        low++;
        high++;
        output++;
    }

    low    = s->plane[plane].subband[1 + 3 * idx];
    high   = s->plane[plane].subband[3 + 3 * idx];
    output = s->plane[plane].l_h[1 + 3 * idx];
    for (i = 0; i < lowpass_width; i++) {
        // note the stride of "low" is highpass_stride
        vert_filter(output, lowpass_width, low, highpass_stride, high,
                    highpass_stride, lowpass_height);
        low++;
        high++;
        output++;
    }

    low  = s->plane[plane].l_h[0 + 3 * idx];
    high = s->plane[plane].l_h[1 + 3 * idx];
    if (level != 3) {
        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            horiz_filter(output, low, high, lowpass_width);
            low    += lowpass_width;
            high   += lowpass_width;
            output += lowpass_width * 2;
        }
        if (s->bpc == 12 || level == 2) {
            output = s->plane[plane].subband[0];
            for (i = 0; i < lowpass_height * 2; i++) {
                for (j = 0; j < lowpass_width * 2; j++)
                    output[j] <<= 2;

                output += lowpass_width * 2;
            }
        }
    } else {
        int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
        dst = (int16_t *)pic->data[act_plane];
        for (i = 0; i < lowpass_height * 2; i++) {
            horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
            low  += lowpass_width;
            high += lowpass_width;
            dst  += pic->linesize[act_plane] / 2;
        }
    }

    return 0;
}
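
/*
 * A frame is decoded in two tag-parsing passes: the first reads header
 * tags until the end-of-header marker (tag 4, value 0x1A4A) and sets up
 * the dimensions, pixel format and buffers; the second reads the
 * per-subband coefficient chunks.  The three decomposition levels of every
 * plane are then reconstructed into the output frame.
 */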

static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                       AVPacket *avpkt)
{
    CFHDContext *s = avctx->priv_data;
    GetByteContext gb;
    ThreadFrame frame = { .f = data };
    int ret = 0, planes, plane;
    int16_t tag;
    uint16_t value;

    init_frame_defaults(s);
    planes = av_pix_fmt_count_planes(s->coded_format);

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    while (bytestream2_get_bytes_left(&gb) > 4) {
        if ((ret = parse_tag(s, &gb, &tag, &value, &planes)) < 0)
            return ret;

        /* Some kind of end of header tag */
        if (tag == 4 && value == 0x1A4A)
            break;
    }

    if (s->coded_width <= 0 || s->coded_height <= 0 || s->coded_format == AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Video dimensions/format missing or invalid\n");
        return AVERROR_INVALIDDATA;
    }

    ret = ff_set_dimensions(s->avctx, s->coded_width, s->coded_height);
    if (ret < 0)
        return ret;
    if (s->cropped_height)
        s->avctx->height = s->cropped_height;

    s->avctx->pix_fmt = s->coded_format;

    if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
        s->a_format != s->coded_format) {
        free_buffers(s);
        if ((ret = alloc_buffers(s)) < 0) {
            free_buffers(s);
            return ret;
        }
    }

    if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
        return ret;

    s->coded_width  = 0;
    s->coded_height = 0;
    s->coded_format = AV_PIX_FMT_NONE;

    while (bytestream2_get_bytes_left(&gb) > 4) {
        int16_t *coeff_data;

        if ((ret = parse_tag(s, &gb, &tag, &value, &planes)) < 0)
            return ret;

        coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];

        if (tag == 4 && value == 0x0F0F) {
            if ((ret = read_lowpass_coeffs(s, &gb, coeff_data)) < 0)
                return ret;
        } else if (tag == 55 && s->subband_num_actual != 255) {
            if ((ret = read_highpass_coeffs(s, &gb, coeff_data)) < 0)
                return ret;
        }
    }

    if (s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
        return AVERROR_INVALIDDATA;
    }

    planes = av_pix_fmt_count_planes(avctx->pix_fmt);
    for (plane = 0; plane < planes; plane++) {
        /* level 1 */
        if ((ret = reconstruct_level(s, data, plane, 1)) < 0)
            return ret;

        /* level 2 */
        if ((ret = reconstruct_level(s, data, plane, 2)) < 0)
            return ret;

        /* level 3 */
        if ((ret = reconstruct_level(s, data, plane, 3)) < 0)
            return ret;
    }

    *got_frame = 1;

    return avpkt->size;
}

static av_cold int cfhd_close(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    free_buffers(s);

    ff_free_vlc(&s->vlc_9);
    ff_free_vlc(&s->vlc_18);

    return 0;
}

AVCodec ff_cfhd_decoder = {
    .name             = "cfhd",
    .long_name        = NULL_IF_CONFIG_SMALL("Cineform HD"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_CFHD,
    .priv_data_size   = sizeof(CFHDContext),
    .init             = cfhd_init,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(cfhd_init),
    .close            = cfhd_close,
    .decode           = cfhd_decode,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};