/*
 * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Cineform HD video decoder
 */

#include "libavutil/attributes.h"
#include "libavutil/buffer.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "get_bits.h"
#include "internal.h"
#include "thread.h"
#include "cfhd.h"

#define ALPHA_COMPAND_DC_OFFSET 256
#define ALPHA_COMPAND_GAIN 9400
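
/* Build the two decompanding lookup tables used for codebooks 0 and 1:
 * table 0 is piecewise linear with steeper slopes above magnitudes 40 and 54,
 * table 1 adds a cubic term on top of the identity mapping. */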
static av_cold int cfhd_init(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    s->avctx = avctx;

    for (int i = 0; i < 64; i++) {
        int val = i;

        if (val >= 40) {
            if (val >= 54) {
                val -= 54;
                val <<= 2;
                val += 54;
            }

            val -= 40;
            val <<= 2;
            val += 40;
        }

        s->lut[0][i] = val;
    }

    /* 64-bit intermediate: 768 * 255^3 does not fit in a 32-bit int */
    for (int i = 0; i < 256; i++)
        s->lut[1][i] = i + ((768LL * i * i * i) / (256 * 256 * 256));

    return ff_cfhd_init_vlcs(s);
}

static void init_plane_defaults(CFHDContext *s)
{
    s->subband_num        = 0;
    s->level              = 0;
    s->subband_num_actual = 0;
}

static void init_peak_table_defaults(CFHDContext *s)
{
    s->peak.level  = 0;
    s->peak.offset = 0;
    memset(&s->peak.base, 0, sizeof(s->peak.base));
}

static void init_frame_defaults(CFHDContext *s)
{
    s->coded_width       = 0;
    s->coded_height      = 0;
    s->cropped_height    = 0;
    s->bpc               = 10;
    s->channel_cnt       = 4;
    s->subband_cnt       = SUBBAND_COUNT;
    s->channel_num       = 0;
    s->lowpass_precision = 16;
    s->quantisation      = 1;
    s->wavelet_depth     = 3;
    s->pshift            = 1;
    s->codebook          = 0;
    s->difference_coding = 0;
    s->progressive       = 0;
    init_plane_defaults(s);
    init_peak_table_defaults(s);
}
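
/* Reverse the encoder's quantisation: codebooks 0 and 1 run the coefficient
 * magnitude through the matching decompanding LUT before scaling by the
 * quantiser; other codebooks are plain linear dequantisation. */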
static inline int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
{
    if (codebook == 0 || codebook == 1) {
        return s->lut[codebook][abs(level)] * FFSIGN(level) * quantisation;
    } else
        return level * quantisation;
}

static inline void difference_coding(int16_t *band, int width, int height)
{
    int i, j;

    for (i = 0; i < height; i++) {
        for (j = 1; j < width; j++) {
            band[j] += band[j - 1];
        }
        band += width;
    }
}

static inline void peak_table(int16_t *band, Peak *peak, int length)
{
    int i;
    for (i = 0; i < length; i++)
        if (abs(band[i]) > peak->level)
            band[i] = bytestream2_get_le16(&peak->base);
}
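
/* Undo the alpha channel companding: remove the DC offset, apply the fixed
 * gain in 16-bit fixed point and clip the result to 12 bits. */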
static inline void process_alpha(int16_t *alpha, int width)
{
    int i, channel;
    for (i = 0; i < width; i++) {
        channel = alpha[i];
        channel -= ALPHA_COMPAND_DC_OFFSET;
        channel <<= 3;
        channel *= ALPHA_COMPAND_GAIN;
        channel >>= 16;
        channel = av_clip_uintp2(channel, 12);
        alpha[i] = channel;
    }
}
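
/* The Bayer variant stores G, an R-G difference, a B-G difference and a G
 * difference interleaved in one RGGB mosaic; convert them back to sensor
 * values and scale up to 16 bits, two rows at a time. */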
static inline void process_bayer(AVFrame *frame)
{
    const int linesize = frame->linesize[0];
    uint16_t *r  = (uint16_t *)frame->data[0];
    uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);
    uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]);
    uint16_t *b  = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
    const int mid = 2048;

    for (int y = 0; y < frame->height >> 1; y++) {
        for (int x = 0; x < frame->width; x += 2) {
            int R, G1, G2, B;
            int g, rg, bg, gd;

            g  = r[x];
            rg = g1[x];
            bg = g2[x];
            gd = b[x];
            gd -= mid;

            R  = (rg - mid) * 2 + g;
            G1 = g + gd;
            G2 = g - gd;
            B  = (bg - mid) * 2 + g;

            R  = av_clip_uintp2(R  * 16, 16);
            G1 = av_clip_uintp2(G1 * 16, 16);
            G2 = av_clip_uintp2(G2 * 16, 16);
            B  = av_clip_uintp2(B  * 16, 16);

            r[x]  = R;
            g1[x] = G1;
            g2[x] = G2;
            b[x]  = B;
        }

        r  += linesize;
        g1 += linesize;
        g2 += linesize;
        b  += linesize;
    }
}
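
/* One dimension of the inverse wavelet reconstruction: each low/high
 * coefficient pair produces two output samples (even and odd). The first and
 * last pairs use 11/-4/1 boundary taps, the interior uses a
 * (prev - next)/8 update term. When clip is non-zero the output is clamped
 * to that many bits. */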
static inline void filter(int16_t *output, ptrdiff_t out_stride,
                          int16_t *low, ptrdiff_t low_stride,
                          int16_t *high, ptrdiff_t high_stride,
                          int len, int clip)
{
    int16_t tmp;
    int i;

    tmp = (11*low[0*low_stride] - 4*low[1*low_stride] + low[2*low_stride] + 4) >> 3;
    output[(2*0+0)*out_stride] = (tmp + high[0*high_stride]) >> 1;
    if (clip)
        output[(2*0+0)*out_stride] = av_clip_uintp2_c(output[(2*0+0)*out_stride], clip);

    tmp = ( 5*low[0*low_stride] + 4*low[1*low_stride] - low[2*low_stride] + 4) >> 3;
    output[(2*0+1)*out_stride] = (tmp - high[0*high_stride]) >> 1;
    if (clip)
        output[(2*0+1)*out_stride] = av_clip_uintp2_c(output[(2*0+1)*out_stride], clip);

    for (i = 1; i < len - 1; i++) {
        tmp = (low[(i-1)*low_stride] - low[(i+1)*low_stride] + 4) >> 3;
        output[(2*i+0)*out_stride] = (tmp + low[i*low_stride] + high[i*high_stride]) >> 1;
        if (clip)
            output[(2*i+0)*out_stride] = av_clip_uintp2_c(output[(2*i+0)*out_stride], clip);

        tmp = (low[(i+1)*low_stride] - low[(i-1)*low_stride] + 4) >> 3;
        output[(2*i+1)*out_stride] = (tmp + low[i*low_stride] - high[i*high_stride]) >> 1;
        if (clip)
            output[(2*i+1)*out_stride] = av_clip_uintp2_c(output[(2*i+1)*out_stride], clip);
    }

    tmp = ( 5*low[i*low_stride] + 4*low[(i-1)*low_stride] - low[(i-2)*low_stride] + 4) >> 3;
    output[(2*i+0)*out_stride] = (tmp + high[i*high_stride]) >> 1;
    if (clip)
        output[(2*i+0)*out_stride] = av_clip_uintp2_c(output[(2*i+0)*out_stride], clip);

    tmp = (11*low[i*low_stride] - 4*low[(i-1)*low_stride] + low[(i-2)*low_stride] + 4) >> 3;
    output[(2*i+1)*out_stride] = (tmp - high[i*high_stride]) >> 1;
    if (clip)
        output[(2*i+1)*out_stride] = av_clip_uintp2_c(output[(2*i+1)*out_stride], clip);
}

static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
                                              int width, int linesize, int plane)
{
    int i;
    int16_t even, odd;

    for (i = 0; i < width; i++) {
        even = (low[i] - high[i]) / 2;
        odd  = (low[i] + high[i]) / 2;

        output[i]            = av_clip_uintp2(even, 10);
        output[i + linesize] = av_clip_uintp2(odd, 10);
    }
}

static void horiz_filter(int16_t *output, int16_t *low, int16_t *high,
                         int width)
{
    filter(output, 1, low, 1, high, 1, width, 0);
}

static void horiz_filter_clip(int16_t *output, int16_t *low, int16_t *high,
                              int width, int clip)
{
    filter(output, 1, low, 1, high, 1, width, clip);
}

static void horiz_filter_clip_bayer(int16_t *output, int16_t *low, int16_t *high,
                                    int width, int clip)
{
    filter(output, 2, low, 1, high, 1, width, clip);
}

static void vert_filter(int16_t *output, ptrdiff_t out_stride,
                        int16_t *low, ptrdiff_t low_stride,
                        int16_t *high, ptrdiff_t high_stride, int len)
{
    filter(output, out_stride, low, low_stride, high, high_stride, len, 0);
}

static void free_buffers(CFHDContext *s)
{
    int i, j;

    for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
        av_freep(&s->plane[i].idwt_buf);
        av_freep(&s->plane[i].idwt_tmp);

        for (j = 0; j < 9; j++)
            s->plane[i].subband[j] = NULL;

        for (j = 0; j < 8; j++)
            s->plane[i].l_h[j] = NULL;
    }
    s->a_height = 0;
    s->a_width  = 0;
}
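
/* Size the per-plane scratch buffers from the coded dimensions and lay the
 * ten subband pointers out inside one idwt buffer: the lowpass band first,
 * then the three highpass bands of each decomposition level, coarsest
 * level first. */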
static int alloc_buffers(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;
    int i, j, ret, planes;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        s->coded_width *= 2;
        s->coded_height *= 2;
    }

    if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
        return ret;
    avctx->pix_fmt = s->coded_format;

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);
    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        planes = 4;
        chroma_x_shift = 1;
        chroma_y_shift = 1;
    }

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        int width  = i ? avctx->width  >> chroma_x_shift : avctx->width;
        int height = i ? avctx->height >> chroma_y_shift : avctx->height;
        ptrdiff_t stride = FFALIGN(width / 8, 8) * 8;
        if (chroma_y_shift)
            height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width  = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        w8 = FFALIGN(s->plane[i].width / 8, 8);
        h8 = FFALIGN(height, 8) / 8;
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        s->plane[i].idwt_buf =
            av_mallocz_array(FFALIGN(height, 8) * stride, sizeof(*s->plane[i].idwt_buf));
        s->plane[i].idwt_tmp =
            av_malloc_array(FFALIGN(height, 8) * stride, sizeof(*s->plane[i].idwt_tmp));
        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
        s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
        s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;

        for (j = 0; j < DWT_LEVELS; j++) {
            for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                s->plane[i].band[j][k].a_width  = w8 << j;
                s->plane[i].band[j][k].a_height = h8 << j;
            }
        }

        /* ll2 and ll1 commented out because they are done in-place */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
    }

    s->a_height = s->coded_height;
    s->a_width  = s->coded_width;
    s->a_format = s->coded_format;

    return 0;
}
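
/* Decode one sample: walk the 16-bit tag/value pairs of the header, read the
 * raw lowpass band and the run-length coded highpass bands for each plane,
 * then run the three-level inverse wavelet transform into the output frame. */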
static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                       AVPacket *avpkt)
{
    CFHDContext *s = avctx->priv_data;
    GetByteContext gb;
    ThreadFrame frame = { .f = data };
    AVFrame *pic = data;
    int ret = 0, i, j, planes, plane, got_buffer = 0;
    int16_t *coeff_data;

    s->coded_format = AV_PIX_FMT_YUV422P10;
    init_frame_defaults(s);
    planes = av_pix_fmt_count_planes(s->coded_format);

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    while (bytestream2_get_bytes_left(&gb) > 4) {
        /* Bit weird but implement the tag parsing as the spec says */
        uint16_t tagu   = bytestream2_get_be16(&gb);
        int16_t tag     = (int16_t)tagu;
        int8_t tag8     = (int8_t)(tagu >> 8);
        uint16_t abstag = abs(tag);
        int8_t abs_tag8 = abs(tag8);
        uint16_t data   = bytestream2_get_be16(&gb);
        if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
            av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
        } else if (tag == SampleFlags) {
            av_log(avctx, AV_LOG_DEBUG, "Progressive?%"PRIu16"\n", data);
            s->progressive = data & 0x0001;
        } else if (tag == ImageWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
            s->coded_width = data;
        } else if (tag == ImageHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
            s->coded_height = data;
        } else if (tag == BitsPerComponent) {
            av_log(avctx, AV_LOG_DEBUG, "Bits per component: %"PRIu16"\n", data);
            if (data < 1 || data > 31) {
                av_log(avctx, AV_LOG_ERROR, "Bits per component %d is invalid\n", data);
                ret = AVERROR(EINVAL);
                break;
            }
            s->bpc = data;
        } else if (tag == ChannelCount) {
            av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
            s->channel_cnt = data;
            if (data > 4) {
                av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
        } else if (tag == SubbandCount) {
            av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
            if (data != SUBBAND_COUNT) {
                av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
        } else if (tag == ChannelNumber) {
            s->channel_num = data;
            av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
            if (s->channel_num >= planes) {
                av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
                ret = AVERROR(EINVAL);
                break;
            }
            init_plane_defaults(s);
        } else if (tag == SubbandNumber) {
            if (s->subband_num != 0 && data == 1)  // hack
                s->level++;
            av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
            s->subband_num = data;
            if (s->level >= DWT_LEVELS) {
                av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
                ret = AVERROR(EINVAL);
                break;
            }
            if (s->subband_num > 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
                ret = AVERROR(EINVAL);
                break;
            }
        } else if (tag == SubbandBand) {
            av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
            s->subband_num_actual = data;
            if (s->subband_num_actual >= 10) {
                av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
                ret = AVERROR(EINVAL);
                break;
            }
        } else if (tag == LowpassPrecision)
            av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
        else if (tag == Quantization) {
            s->quantisation = data;
            av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
        } else if (tag == PrescaleShift) {
            s->prescale_shift[0] = (data >> 0) & 0x7;
            s->prescale_shift[1] = (data >> 3) & 0x7;
            s->prescale_shift[2] = (data >> 6) & 0x7;
            av_log(avctx, AV_LOG_DEBUG, "Prescale shift (VC-5): %x\n", data);
        } else if (tag == LowpassWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
            if (data < 3 || data > s->plane[s->channel_num].band[0][0].a_width) {
                av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[0][0].width  = data;
            s->plane[s->channel_num].band[0][0].stride = data;
        } else if (tag == LowpassHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
            if (data < 3 || data > s->plane[s->channel_num].band[0][0].a_height) {
                av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[0][0].height = data;
        } else if (tag == SampleType)
            av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
        else if (tag == TransformType) {
            if (data != 0) {
                avpriv_report_missing_feature(avctx, "Transform type of %"PRIu16, data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
            av_log(avctx, AV_LOG_DEBUG, "Transform-type? %"PRIu16"\n", data);
        } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
            if (abstag == 0x4001)
                s->peak.level = 0;
            av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
            bytestream2_skipu(&gb, data * 4);
        } else if (tag == 23) {
            av_log(avctx, AV_LOG_DEBUG, "Skip frame\n");
            avpriv_report_missing_feature(avctx, "Skip frame");
            ret = AVERROR_PATCHWELCOME;
            break;
        } else if (tag == SampleIndexTable) {
            av_log(avctx, AV_LOG_DEBUG, "tag=2 header - skipping %i tag/value pairs\n", data);
            if (data > bytestream2_get_bytes_left(&gb) / 4) {
                av_log(avctx, AV_LOG_ERROR, "too many tag/value pairs (%d)\n", data);
                ret = AVERROR_INVALIDDATA;
                break;
            }
            for (i = 0; i < data; i++) {
                uint16_t tag2 = bytestream2_get_be16(&gb);
                uint16_t val2 = bytestream2_get_be16(&gb);
                av_log(avctx, AV_LOG_DEBUG, "Tag/Value = %x %x\n", tag2, val2);
            }
        } else if (tag == HighpassWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].width  = data;
            s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        } else if (tag == HighpassHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        } else if (tag == BandWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].width  = data;
            s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        } else if (tag == BandHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        } else if (tag == InputFormat) {
            av_log(avctx, AV_LOG_DEBUG, "Input format %i\n", data);
        } else if (tag == BandCodingFlags) {
            s->codebook = data & 0xf;
            s->difference_coding = (data >> 4) & 1;
            av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
        } else if (tag == Precision) {
            av_log(avctx, AV_LOG_DEBUG, "Precision %i\n", data);
            if (!(data == 10 || data == 12)) {
                av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
                ret = AVERROR(EINVAL);
                break;
            }
            avctx->bits_per_raw_sample = s->bpc = data;
        } else if (tag == EncodedFormat) {
            av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
            if (data == 1) {
                s->coded_format = AV_PIX_FMT_YUV422P10;
            } else if (data == 2) {
                s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
            } else if (data == 3) {
                s->coded_format = AV_PIX_FMT_GBRP12;
            } else if (data == 4) {
                s->coded_format = AV_PIX_FMT_GBRAP12;
            } else {
                avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
            planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
        } else if (tag == -85) {
            av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
            s->cropped_height = data;
        } else if (tag == -75) {
            s->peak.offset &= ~0xffff;
            s->peak.offset |= (data & 0xffff);
            s->peak.base    = gb;
            s->peak.level   = 0;
        } else if (tag == -76) {
            s->peak.offset &= 0xffff;
            s->peak.offset |= (data & 0xffffU) << 16;
            s->peak.base    = gb;
            s->peak.level   = 0;
        } else if (tag == -74 && s->peak.offset) {
            s->peak.level = data;
            bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
        } else
            av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);

        /* Some kind of end of header tag */
        if (tag == BitstreamMarker && data == 0x1a4a && s->coded_width && s->coded_height &&
            s->coded_format != AV_PIX_FMT_NONE) {
            if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
                s->a_format != s->coded_format) {
                free_buffers(s);
                if ((ret = alloc_buffers(avctx)) < 0) {
                    free_buffers(s);
                    return ret;
                }
            }
            ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
            if (ret < 0)
                return ret;
            if (s->cropped_height) {
                unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
                if (avctx->height < height)
                    return AVERROR_INVALIDDATA;
                avctx->height = height;
            }
            frame.f->width =
            frame.f->height = 0;

            if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
                return ret;

            s->coded_width = 0;
            s->coded_height = 0;
            s->coded_format = AV_PIX_FMT_NONE;
            got_buffer = 1;
        }

        coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];

        /* Lowpass coefficients */
        if (tag == BitstreamMarker && data == 0xf0f && s->a_width && s->a_height) {
            int lowpass_height   = s->plane[s->channel_num].band[0][0].height;
            int lowpass_width    = s->plane[s->channel_num].band[0][0].width;
            int lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
            int lowpass_a_width  = s->plane[s->channel_num].band[0][0].a_width;

            if (!got_buffer) {
                av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
                lowpass_a_width * lowpass_a_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
                av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
            for (i = 0; i < lowpass_height; i++) {
                for (j = 0; j < lowpass_width; j++)
                    coeff_data[j] = bytestream2_get_be16u(&gb);

                coeff_data += lowpass_width;
            }

            /* Align to mod-4 position to continue reading tags */
            bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);

            /* Copy last line of coefficients if odd height */
            if (lowpass_height & 1) {
                memcpy(&coeff_data[lowpass_height * lowpass_width],
                       &coeff_data[(lowpass_height - 1) * lowpass_width],
                       lowpass_width * sizeof(*coeff_data));
            }

            av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
        }
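
        /* Highpass band: run-length/VLC coded coefficients, dequantised and
         * decompanded as they are written into the current subband. */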
        if (tag == BandHeader && s->subband_num_actual != 255 && s->a_width && s->a_height) {
            int highpass_height   = s->plane[s->channel_num].band[s->level][s->subband_num].height;
            int highpass_width    = s->plane[s->channel_num].band[s->level][s->subband_num].width;
            int highpass_a_width  = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
            int highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
            int highpass_stride   = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
            int expected;
            int a_expected = highpass_a_height * highpass_a_width;
            int level, run, coeff;
            int count = 0, bytes;

            if (!got_buffer) {
                av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
                av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            expected = highpass_height * highpass_stride;

            av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);

            init_get_bits(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb) * 8);
            {
                OPEN_READER(re, &s->gb);
                if (!s->codebook) {
                    while (1) {
                        UPDATE_CACHE(re, &s->gb);
                        GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
                                   VLC_BITS, 3, 1);

                        /* escape */
                        if (level == 64)
                            break;

                        count += run;

                        if (count > expected)
                            break;

                        coeff = dequant_and_decompand(s, level, s->quantisation, 0);
                        for (i = 0; i < run; i++)
                            *coeff_data++ = coeff;
                    }
                } else {
                    while (1) {
                        UPDATE_CACHE(re, &s->gb);
                        GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
                                   VLC_BITS, 3, 1);

                        /* escape */
                        if (level == 255 && run == 2)
                            break;

                        count += run;

                        if (count > expected)
                            break;

                        coeff = dequant_and_decompand(s, level, s->quantisation, s->codebook);
                        for (i = 0; i < run; i++)
                            *coeff_data++ = coeff;
                    }
                }
                CLOSE_READER(re, &s->gb);
            }

            if (count > expected) {
                av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            if (s->peak.level)
                peak_table(coeff_data - count, &s->peak, count);
            if (s->difference_coding)
                difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);

            bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
            if (bytes > bytestream2_get_bytes_left(&gb)) {
                av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
                ret = AVERROR(EINVAL);
                goto end;
            } else
                bytestream2_seek(&gb, bytes, SEEK_CUR);

            av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);
            s->codebook = 0;

            /* Copy last line of coefficients if odd height */
            if (highpass_height & 1) {
                memcpy(&coeff_data[highpass_height * highpass_stride],
                       &coeff_data[(highpass_height - 1) * highpass_stride],
                       highpass_stride * sizeof(*coeff_data));
            }
        }
    }

    if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
        s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
        ret = AVERROR(EINVAL);
        goto end;
    }

    if (!got_buffer) {
        av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
        ret = AVERROR(EINVAL);
        goto end;
    }

    planes = av_pix_fmt_count_planes(avctx->pix_fmt);
    if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
        if (!s->progressive)
            return AVERROR_INVALIDDATA;
        planes = 4;
    }
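
    /* Reconstruct each plane: at every decomposition level the stored
     * subbands are combined with a vertical then a horizontal inverse filter,
     * the results of levels 1 and 2 overwriting the lowpass band in place. */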
    for (plane = 0; plane < planes && !ret; plane++) {
        /* level 1 */
        int lowpass_height  = s->plane[plane].band[0][0].height;
        int lowpass_width   = s->plane[plane].band[0][0].width;
        int highpass_stride = s->plane[plane].band[0][1].stride;
        int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
        ptrdiff_t dst_linesize;
        int16_t *low, *high, *output, *dst;

        if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
            act_plane = 0;
            dst_linesize = pic->linesize[act_plane];
        } else {
            dst_linesize = pic->linesize[act_plane] / 2;
        }

        if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
            !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width) {
            av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

        low    = s->plane[plane].subband[0];
        high   = s->plane[plane].subband[2];
        output = s->plane[plane].l_h[0];
        for (i = 0; i < lowpass_width; i++) {
            vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low    = s->plane[plane].subband[1];
        high   = s->plane[plane].subband[3];
        output = s->plane[plane].l_h[1];
        for (i = 0; i < lowpass_width; i++) {
            // note the stride of "low" is highpass_stride
            vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low    = s->plane[plane].l_h[0];
        high   = s->plane[plane].l_h[1];
        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            horiz_filter(output, low, high, lowpass_width);
            low    += lowpass_width;
            high   += lowpass_width;
            output += lowpass_width * 2;
        }
        if (s->bpc == 12) {
            output = s->plane[plane].subband[0];
            for (i = 0; i < lowpass_height * 2; i++) {
                for (j = 0; j < lowpass_width * 2; j++)
                    output[j] *= 4;

                output += lowpass_width * 2;
            }
        }

        /* level 2 */
        lowpass_height  = s->plane[plane].band[1][1].height;
        lowpass_width   = s->plane[plane].band[1][1].width;
        highpass_stride = s->plane[plane].band[1][1].stride;

        if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
            !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width) {
            av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

        low    = s->plane[plane].subband[0];
        high   = s->plane[plane].subband[5];
        output = s->plane[plane].l_h[3];
        for (i = 0; i < lowpass_width; i++) {
            vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low    = s->plane[plane].subband[4];
        high   = s->plane[plane].subband[6];
        output = s->plane[plane].l_h[4];
        for (i = 0; i < lowpass_width; i++) {
            vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low    = s->plane[plane].l_h[3];
        high   = s->plane[plane].l_h[4];
        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            horiz_filter(output, low, high, lowpass_width);
            low    += lowpass_width;
            high   += lowpass_width;
            output += lowpass_width * 2;
        }

        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            for (j = 0; j < lowpass_width * 2; j++)
                output[j] *= 4;

            output += lowpass_width * 2;
        }

        /* level 3 */
        lowpass_height  = s->plane[plane].band[2][1].height;
        lowpass_width   = s->plane[plane].band[2][1].width;
        highpass_stride = s->plane[plane].band[2][1].stride;

        if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
            !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width) {
            av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
        if (s->progressive) {
            low    = s->plane[plane].subband[0];
            high   = s->plane[plane].subband[8];
            output = s->plane[plane].l_h[6];
            for (i = 0; i < lowpass_width; i++) {
                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
                low++;
                high++;
                output++;
            }

            low    = s->plane[plane].subband[7];
            high   = s->plane[plane].subband[9];
            output = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_width; i++) {
                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
                low++;
                high++;
                output++;
            }

            dst = (int16_t *)pic->data[act_plane];
            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                if (plane & 1)
                    dst++;
                if (plane > 1)
                    dst += pic->linesize[act_plane] >> 1;
            }
            low  = s->plane[plane].l_h[6];
            high = s->plane[plane].l_h[7];

            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
                (lowpass_height * 2 > avctx->coded_height / 2 ||
                 lowpass_width  * 2 > avctx->coded_width  / 2)) {
                ret = AVERROR_INVALIDDATA;
                goto end;
            }

            for (i = 0; i < lowpass_height * 2; i++) {
                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
                    horiz_filter_clip_bayer(dst, low, high, lowpass_width, s->bpc);
                else
                    horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
                    process_alpha(dst, lowpass_width * 2);
                low  += lowpass_width;
                high += lowpass_width;
                dst  += dst_linesize;
            }
        } else {
            av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
            pic->interlaced_frame = 1;
            low    = s->plane[plane].subband[0];
            high   = s->plane[plane].subband[7];
            output = s->plane[plane].l_h[6];
            for (i = 0; i < lowpass_height; i++) {
                horiz_filter(output, low, high, lowpass_width);
                low    += lowpass_width;
                high   += lowpass_width;
                output += lowpass_width * 2;
            }

            low    = s->plane[plane].subband[8];
            high   = s->plane[plane].subband[9];
            output = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_height; i++) {
                horiz_filter(output, low, high, lowpass_width);
                low    += lowpass_width;
                high   += lowpass_width;
                output += lowpass_width * 2;
            }

            dst  = (int16_t *)pic->data[act_plane];
            low  = s->plane[plane].l_h[6];
            high = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_height; i++) {
                interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane] / 2, act_plane);
                low  += lowpass_width * 2;
                high += lowpass_width * 2;
                dst  += pic->linesize[act_plane];
            }
        }
    }

    if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
        process_bayer(pic);
end:
    if (ret < 0)
        return ret;

    *got_frame = 1;
    return avpkt->size;
}

static av_cold int cfhd_close(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    free_buffers(s);

    ff_free_vlc(&s->vlc_9);
    ff_free_vlc(&s->vlc_18);

    return 0;
}

AVCodec ff_cfhd_decoder = {
    .name             = "cfhd",
    .long_name        = NULL_IF_CONFIG_SMALL("Cineform HD"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_CFHD,
    .priv_data_size   = sizeof(CFHDContext),
    .init             = cfhd_init,
    .close            = cfhd_close,
    .decode           = cfhd_decode,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};