/*
 * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Cineform HD video decoder
 */

#include "libavutil/attributes.h"
#include "libavutil/buffer.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "get_bits.h"
#include "internal.h"
#include "thread.h"
#include "cfhd.h"

#define ALPHA_COMPAND_DC_OFFSET 256
#define ALPHA_COMPAND_GAIN 9400
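
/* Known tag identifiers for the 16-bit tag / 16-bit value pairs that make up
 * a sample header (see the parsing loop in cfhd_decode()). */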
enum CFHDParam {
    SampleType = 1,
    SampleIndexTable = 2,
    BitstreamMarker = 4,
    TransformType = 10,
    ChannelCount = 12,
    SubbandCount = 14,
    ImageWidth = 20,
    ImageHeight = 21,
    LowpassWidth = 27,
    LowpassHeight = 28,
    LowpassPrecision = 35,
    HighpassWidth = 41,
    HighpassHeight = 42,
    SubbandNumber = 48,
    Quantization = 53,
    BandHeader = 55,
    ChannelNumber = 62,
    SampleFlags = 68,
    EncodedFormat = 84,
    BitsPerComponent = 101,
    ChannelWidth = 104,
    ChannelHeight = 105,
    PrescaleShift = 109,
};

static av_cold int cfhd_init(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    avctx->bits_per_raw_sample = 10;
    s->avctx = avctx;

    return ff_cfhd_init_vlcs(s);
}

static void init_plane_defaults(CFHDContext *s)
{
    s->subband_num = 0;
    s->level = 0;
    s->subband_num_actual = 0;
}

static void init_peak_table_defaults(CFHDContext *s)
{
    s->peak.level = 0;
    s->peak.offset = 0;
    memset(&s->peak.base, 0, sizeof(s->peak.base));
}

static void init_frame_defaults(CFHDContext *s)
{
    s->coded_width = 0;
    s->coded_height = 0;
    s->cropped_height = 0;
    s->bpc = 10;
    s->channel_cnt = 4;
    s->subband_cnt = SUBBAND_COUNT;
    s->channel_num = 0;
    s->lowpass_precision = 16;
    s->quantisation = 1;
    s->wavelet_depth = 3;
    s->pshift = 1;
    s->codebook = 0;
    s->difference_coding = 0;
    s->progressive = 0;
    init_plane_defaults(s);
    init_peak_table_defaults(s);
}

/* TODO: merge with VLC tables or use LUT */
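/* Inverse quantisation plus decompanding: for codebooks 0 and 1, small
 * magnitudes are expanded with a cubic term (abslevel + 768*abslevel^3/255^3)
 * before scaling by the quantiser, while larger values scale linearly. */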
static inline int dequant_and_decompand(int level, int quantisation, int codebook)
{
    if (codebook == 0 || codebook == 1) {
        int64_t abslevel = abs(level);
        if (level < 264)
            return (abslevel + ((768 * abslevel * abslevel * abslevel) / (255 * 255 * 255))) *
                   FFSIGN(level) * quantisation;
        else
            return level * quantisation;
    } else
        return level * quantisation;
}
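
/* Horizontal DPCM: each coefficient is stored as the difference from its left
 * neighbour, so integrate along every row of the band. */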
static inline void difference_coding(int16_t *band, int width, int height)
{
    int i,j;
    for (i = 0; i < height; i++) {
        for (j = 1; j < width; j++) {
            band[j] += band[j-1];
        }
        band += width;
    }
}
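
/* Replace coefficients whose magnitude exceeds the signalled peak level with
 * explicit 16-bit little-endian values read from the peak table. */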
static inline void peak_table(int16_t *band, Peak *peak, int length)
{
    int i;
    for (i = 0; i < length; i++)
        if (abs(band[i]) > peak->level)
            band[i] = bytestream2_get_le16(&peak->base);
}
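
/* Undo the alpha companding: remove the DC offset, rescale by
 * 8 * ALPHA_COMPAND_GAIN / 65536 and clip the result to 12 bits. */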
static inline void process_alpha(int16_t *alpha, int width)
{
    int i, channel;
    for (i = 0; i < width; i++) {
        channel = alpha[i];
        channel -= ALPHA_COMPAND_DC_OFFSET;
        channel <<= 3;
        channel *= ALPHA_COMPAND_GAIN;
        channel >>= 16;
        channel = av_clip_uintp2(channel, 12);
        alpha[i] = channel;
    }
}
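
/* Final Bayer stage: the four positions of each 2x2 mosaic cell appear to hold
 * G, an R-G difference, a B-G difference and a G difference after the inverse
 * transform; rebuild R, G1, G2, B around the mid-point and scale the 12-bit
 * values up to the full 16-bit range. */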
static inline void process_bayer(AVFrame *frame)
{
    const int linesize = frame->linesize[0];
    uint16_t *r = (uint16_t *)frame->data[0];
    uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);
    uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]);
    uint16_t *b = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
    const int mid = 2048;

    for (int y = 0; y < frame->height >> 1; y++) {
        for (int x = 0; x < frame->width; x += 2) {
            int R, G1, G2, B;
            int g, rg, bg, gd;

            g = r[x];
            rg = g1[x];
            bg = g2[x];
            gd = b[x];
            gd -= mid;

            R = (rg - mid) * 2 + g;
            G1 = g + gd;
            G2 = g - gd;
            B = (bg - mid) * 2 + g;

            R = av_clip_uintp2(R * 16, 16);
            G1 = av_clip_uintp2(G1 * 16, 16);
            G2 = av_clip_uintp2(G2 * 16, 16);
            B = av_clip_uintp2(B * 16, 16);

            r[x] = R;
            g1[x] = G1;
            g2[x] = G2;
            b[x] = B;
        }

        r += linesize;
        g1 += linesize;
        g2 += linesize;
        b += linesize;
    }
}
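
/* One dimension of what appears to be the inverse 2/6 wavelet reconstruction:
 * each pair of output samples is rebuilt from a lowpass sample, its neighbours
 * and one highpass sample, with mirrored handling at both borders. A non-zero
 * clip value limits the output to that many bits. */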
static inline void filter(int16_t *output, ptrdiff_t out_stride,
                          int16_t *low, ptrdiff_t low_stride,
                          int16_t *high, ptrdiff_t high_stride,
                          int len, int clip)
{
    int16_t tmp;
    int i;

    for (i = 0; i < len; i++) {
        if (i == 0) {
            tmp = (11*low[0*low_stride] - 4*low[1*low_stride] + low[2*low_stride] + 4) >> 3;
            output[(2*i+0)*out_stride] = (tmp + high[0*high_stride]) >> 1;
            if (clip)
                output[(2*i+0)*out_stride] = av_clip_uintp2_c(output[(2*i+0)*out_stride], clip);
            tmp = ( 5*low[0*low_stride] + 4*low[1*low_stride] - low[2*low_stride] + 4) >> 3;
            output[(2*i+1)*out_stride] = (tmp - high[0*high_stride]) >> 1;
            if (clip)
                output[(2*i+1)*out_stride] = av_clip_uintp2_c(output[(2*i+1)*out_stride], clip);
        } else if (i == len-1) {
            tmp = ( 5*low[i*low_stride] + 4*low[(i-1)*low_stride] - low[(i-2)*low_stride] + 4) >> 3;
            output[(2*i+0)*out_stride] = (tmp + high[i*high_stride]) >> 1;
            if (clip)
                output[(2*i+0)*out_stride] = av_clip_uintp2_c(output[(2*i+0)*out_stride], clip);
            tmp = (11*low[i*low_stride] - 4*low[(i-1)*low_stride] + low[(i-2)*low_stride] + 4) >> 3;
            output[(2*i+1)*out_stride] = (tmp - high[i*high_stride]) >> 1;
            if (clip)
                output[(2*i+1)*out_stride] = av_clip_uintp2_c(output[(2*i+1)*out_stride], clip);
        } else {
            tmp = (low[(i-1)*low_stride] - low[(i+1)*low_stride] + 4) >> 3;
            output[(2*i+0)*out_stride] = (tmp + low[i*low_stride] + high[i*high_stride]) >> 1;
            if (clip)
                output[(2*i+0)*out_stride] = av_clip_uintp2_c(output[(2*i+0)*out_stride], clip);
            tmp = (low[(i+1)*low_stride] - low[(i-1)*low_stride] + 4) >> 3;
            output[(2*i+1)*out_stride] = (tmp + low[i*low_stride] - high[i*high_stride]) >> 1;
            if (clip)
                output[(2*i+1)*out_stride] = av_clip_uintp2_c(output[(2*i+1)*out_stride], clip);
        }
    }
}
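
/* Vertical recombination used for interlaced content: each low/high pair
 * produces one even and one odd output line (one per field), clipped to 10 bits. */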
static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
                                              int width, int linesize, int plane)
{
    int i;
    int16_t even, odd;
    for (i = 0; i < width; i++) {
        even = (low[i] - high[i])/2;
        odd = (low[i] + high[i])/2;
        output[i] = av_clip_uintp2(even, 10);
        output[i + linesize] = av_clip_uintp2(odd, 10);
    }
}
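
/* Thin wrappers around filter() selecting output stride and clipping; the
 * Bayer variant writes every second output sample so the four mosaic
 * components stay interleaved within a single plane. */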
static void horiz_filter(int16_t *output, int16_t *low, int16_t *high,
                         int width)
{
    filter(output, 1, low, 1, high, 1, width, 0);
}

static void horiz_filter_clip(int16_t *output, int16_t *low, int16_t *high,
                              int width, int clip)
{
    filter(output, 1, low, 1, high, 1, width, clip);
}

static void horiz_filter_clip_bayer(int16_t *output, int16_t *low, int16_t *high,
                                    int width, int clip)
{
    filter(output, 2, low, 1, high, 1, width, clip);
}

static void vert_filter(int16_t *output, ptrdiff_t out_stride,
                        int16_t *low, ptrdiff_t low_stride,
                        int16_t *high, ptrdiff_t high_stride, int len)
{
    filter(output, out_stride, low, low_stride, high, high_stride, len, 0);
}

static void free_buffers(CFHDContext *s)
{
    int i, j;
    for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
        av_freep(&s->plane[i].idwt_buf);
        av_freep(&s->plane[i].idwt_tmp);

        for (j = 0; j < 9; j++)
            s->plane[i].subband[j] = NULL;

        for (j = 0; j < 8; j++)
            s->plane[i].l_h[j] = NULL;
    }
    s->a_height = 0;
    s->a_width = 0;
}
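
/* Allocate the per-plane intermediate buffers. idwt_buf holds the ten subbands
 * of the three-level transform (subband 0 is the 1/8-resolution lowpass band,
 * which is also reused as the running lowpass image between levels), while
 * idwt_tmp holds the partially reconstructed rows referenced through l_h[]. */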
static int alloc_buffers(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;
    int i, j, ret, planes;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        s->coded_width *= 2;
        s->coded_height *= 2;
    }

    if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
        return ret;
    avctx->pix_fmt = s->coded_format;

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);
    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        planes = 4;
        chroma_x_shift = 1;
        chroma_y_shift = 1;
    }

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        int width = i ? avctx->width >> chroma_x_shift : avctx->width;
        int height = i ? avctx->height >> chroma_y_shift : avctx->height;
        ptrdiff_t stride = FFALIGN(width / 8, 8) * 8;

        if (chroma_y_shift)
            height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        w8 = FFALIGN(s->plane[i].width / 8, 8);
        h8 = height / 8;
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        s->plane[i].idwt_buf =
            av_mallocz_array(height * stride, sizeof(*s->plane[i].idwt_buf));
        s->plane[i].idwt_tmp =
            av_malloc_array(height * stride, sizeof(*s->plane[i].idwt_tmp));
        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
        s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
        s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;

        for (j = 0; j < DWT_LEVELS; j++) {
            for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                s->plane[i].band[j][k].a_width = w8 << j;
                s->plane[i].band[j][k].a_height = h8 << j;
            }
        }

        /* ll2 and ll1 commented out because they are done in-place */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
    }

    s->a_height = s->coded_height;
    s->a_width = s->coded_width;
    s->a_format = s->coded_format;

    return 0;
}
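
/* Decode one CineForm sample: walk the tag/value pairs of the header, read the
 * lowpass band and the run-length/VLC coded highpass bands for every channel,
 * then run the three-level inverse wavelet per plane into the output frame. */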
static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                       AVPacket *avpkt)
{
    CFHDContext *s = avctx->priv_data;
    GetByteContext gb;
    ThreadFrame frame = { .f = data };
    AVFrame *pic = data;
    int ret = 0, i, j, planes, plane, got_buffer = 0;
    int16_t *coeff_data;

    s->coded_format = AV_PIX_FMT_YUV422P10;
    init_frame_defaults(s);
    planes = av_pix_fmt_count_planes(s->coded_format);

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    while (bytestream2_get_bytes_left(&gb) > 4) {
        /* Bit weird but implement the tag parsing as the spec says */
        uint16_t tagu = bytestream2_get_be16(&gb);
        int16_t tag = (int16_t)tagu;
        int8_t tag8 = (int8_t)(tagu >> 8);
        uint16_t abstag = abs(tag);
        int8_t abs_tag8 = abs(tag8);
        uint16_t data = bytestream2_get_be16(&gb);
        if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
            av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
        } else if (tag == SampleFlags) {
            av_log(avctx, AV_LOG_DEBUG, "Progressive?%"PRIu16"\n", data);
            s->progressive = data & 0x0001;
        } else if (tag == ImageWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
            s->coded_width = data;
        } else if (tag == ImageHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
            s->coded_height = data;
        } else if (tag == 101) {
            av_log(avctx, AV_LOG_DEBUG, "Bits per component: %"PRIu16"\n", data);
            if (data < 1 || data > 31) {
                av_log(avctx, AV_LOG_ERROR, "Bits per component %d is invalid\n", data);
                ret = AVERROR(EINVAL);
                break;
            }
            s->bpc = data;
        } else if (tag == ChannelCount) {
            av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
            s->channel_cnt = data;
            if (data > 4) {
                av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
        } else if (tag == SubbandCount) {
            av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
            if (data != SUBBAND_COUNT) {
                av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
        } else if (tag == ChannelNumber) {
            s->channel_num = data;
            av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
            if (s->channel_num >= planes) {
                av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
                ret = AVERROR(EINVAL);
                break;
            }
            init_plane_defaults(s);
        } else if (tag == SubbandNumber) {
            if (s->subband_num != 0 && data == 1) // hack
                s->level++;
            av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
            s->subband_num = data;
            if (s->level >= DWT_LEVELS) {
                av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
                ret = AVERROR(EINVAL);
                break;
            }
            if (s->subband_num > 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
                ret = AVERROR(EINVAL);
                break;
            }
        } else if (tag == 51) {
            av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
            s->subband_num_actual = data;
            if (s->subband_num_actual >= 10) {
                av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
                ret = AVERROR(EINVAL);
                break;
            }
        } else if (tag == LowpassPrecision)
            av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
        else if (tag == Quantization) {
            s->quantisation = data;
            av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
        } else if (tag == PrescaleShift) {
            s->prescale_shift[0] = (data >> 0) & 0x7;
            s->prescale_shift[1] = (data >> 3) & 0x7;
            s->prescale_shift[2] = (data >> 6) & 0x7;
            av_log(avctx, AV_LOG_DEBUG, "Prescale shift (VC-5): %x\n", data);
        } else if (tag == LowpassWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
            if (data < 3 || data > s->plane[s->channel_num].band[0][0].a_width) {
                av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[0][0].width = data;
            s->plane[s->channel_num].band[0][0].stride = data;
        } else if (tag == LowpassHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
            if (data < 3 || data > s->plane[s->channel_num].band[0][0].a_height) {
                av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[0][0].height = data;
        } else if (tag == SampleType)
            av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
        else if (tag == TransformType) {
            if (data != 0) {
                avpriv_report_missing_feature(avctx, "Transform type of %"PRIu16, data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
            av_log(avctx, AV_LOG_DEBUG, "Transform-type? %"PRIu16"\n", data);
        } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
            if (abstag == 0x4001)
                s->peak.level = 0;
            av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
            bytestream2_skipu(&gb, data * 4);
        } else if (tag == 23) {
            av_log(avctx, AV_LOG_DEBUG, "Skip frame\n");
            avpriv_report_missing_feature(avctx, "Skip frame");
            ret = AVERROR_PATCHWELCOME;
            break;
        } else if (tag == SampleIndexTable) {
            av_log(avctx, AV_LOG_DEBUG, "tag=2 header - skipping %i tag/value pairs\n", data);
            if (data > bytestream2_get_bytes_left(&gb) / 4) {
                av_log(avctx, AV_LOG_ERROR, "too many tag/value pairs (%d)\n", data);
                ret = AVERROR_INVALIDDATA;
                break;
            }
            for (i = 0; i < data; i++) {
                uint16_t tag2 = bytestream2_get_be16(&gb);
                uint16_t val2 = bytestream2_get_be16(&gb);
                av_log(avctx, AV_LOG_DEBUG, "Tag/Value = %x %x\n", tag2, val2);
            }
        } else if (tag == HighpassWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
            s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        } else if (tag == HighpassHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        } else if (tag == 49) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
            s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        } else if (tag == 50) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        } else if (tag == 71) {
            s->codebook = data;
            av_log(avctx, AV_LOG_DEBUG, "Codebook %i\n", s->codebook);
        } else if (tag == 72) {
            s->codebook = data & 0xf;
            s->difference_coding = (data >> 4) & 1;
            av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
        } else if (tag == 70) {
            av_log(avctx, AV_LOG_DEBUG, "Subsampling or bit-depth flag? %i\n", data);
            if (!(data == 10 || data == 12)) {
                av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
                ret = AVERROR(EINVAL);
                break;
            }
            s->bpc = data;
        } else if (tag == EncodedFormat) {
            av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
            if (data == 1) {
                s->coded_format = AV_PIX_FMT_YUV422P10;
            } else if (data == 2) {
                s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
            } else if (data == 3) {
                s->coded_format = AV_PIX_FMT_GBRP12;
            } else if (data == 4) {
                s->coded_format = AV_PIX_FMT_GBRAP12;
            } else {
                avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
                ret = AVERROR_PATCHWELCOME;
                break;
            }
            planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
        } else if (tag == -85) {
            av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
            s->cropped_height = data;
        } else if (tag == -75) {
            s->peak.offset &= ~0xffff;
            s->peak.offset |= (data & 0xffff);
            s->peak.base = gb;
            s->peak.level = 0;
        } else if (tag == -76) {
            s->peak.offset &= 0xffff;
            s->peak.offset |= (data & 0xffffU)<<16;
            s->peak.base = gb;
            s->peak.level = 0;
        } else if (tag == -74 && s->peak.offset) {
            s->peak.level = data;
            bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
        } else
            av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);

        /* Some kind of end of header tag */
        if (tag == BitstreamMarker && data == 0x1a4a && s->coded_width && s->coded_height &&
            s->coded_format != AV_PIX_FMT_NONE) {
            if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
                s->a_format != s->coded_format) {
                free_buffers(s);
                if ((ret = alloc_buffers(avctx)) < 0) {
                    free_buffers(s);
                    return ret;
                }
            }
            ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
            if (ret < 0)
                return ret;
            if (s->cropped_height) {
                unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
                if (avctx->height < height)
                    return AVERROR_INVALIDDATA;
                avctx->height = height;
            }
            frame.f->width =
            frame.f->height = 0;

            if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
                return ret;

            s->coded_width = 0;
            s->coded_height = 0;
            s->coded_format = AV_PIX_FMT_NONE;
            got_buffer = 1;
        }

        coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];

        /* Lowpass coefficients */
        if (tag == BitstreamMarker && data == 0xf0f && s->a_width && s->a_height) {
            int lowpass_height = s->plane[s->channel_num].band[0][0].height;
            int lowpass_width = s->plane[s->channel_num].band[0][0].width;
            int lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
            int lowpass_a_width = s->plane[s->channel_num].band[0][0].a_width;

            if (!got_buffer) {
                av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
                lowpass_a_width * lowpass_a_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
                av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
            for (i = 0; i < lowpass_height; i++) {
                for (j = 0; j < lowpass_width; j++)
                    coeff_data[j] = bytestream2_get_be16u(&gb);

                coeff_data += lowpass_width;
            }

            /* Align to mod-4 position to continue reading tags */
            bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);

            /* Copy last line of coefficients if odd height */
            if (lowpass_height & 1) {
                memcpy(&coeff_data[lowpass_height * lowpass_width],
                       &coeff_data[(lowpass_height - 1) * lowpass_width],
                       lowpass_width * sizeof(*coeff_data));
            }

            av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
        }
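
        /* Highpass band coefficients: run-length/VLC coded with either the
         * 9-bit or the 18-bit codebook table and terminated by an escape code. */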
        if (tag == BandHeader && s->subband_num_actual != 255 && s->a_width && s->a_height) {
            int highpass_height = s->plane[s->channel_num].band[s->level][s->subband_num].height;
            int highpass_width = s->plane[s->channel_num].band[s->level][s->subband_num].width;
            int highpass_a_width = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
            int highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
            int highpass_stride = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
            int expected;
            int a_expected = highpass_a_height * highpass_a_width;
            int level, run, coeff;
            int count = 0, bytes;

            if (!got_buffer) {
                av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
                av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            expected = highpass_height * highpass_stride;

            av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);

            init_get_bits(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb) * 8);
            {
                OPEN_READER(re, &s->gb);
                if (!s->codebook) {
                    while (1) {
                        UPDATE_CACHE(re, &s->gb);
                        GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
                                   VLC_BITS, 3, 1);

                        /* escape */
                        if (level == 64)
                            break;

                        count += run;

                        if (count > expected)
                            break;

                        coeff = dequant_and_decompand(level, s->quantisation, 0);
                        for (i = 0; i < run; i++)
                            *coeff_data++ = coeff;
                    }
                } else {
                    while (1) {
                        UPDATE_CACHE(re, &s->gb);
                        GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
                                   VLC_BITS, 3, 1);

                        /* escape */
                        if (level == 255 && run == 2)
                            break;

                        count += run;

                        if (count > expected)
                            break;

                        coeff = dequant_and_decompand(level, s->quantisation, s->codebook);
                        for (i = 0; i < run; i++)
                            *coeff_data++ = coeff;
                    }
                }
                CLOSE_READER(re, &s->gb);
            }

            if (count > expected) {
                av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            if (s->peak.level)
                peak_table(coeff_data - count, &s->peak, count);
            if (s->difference_coding)
                difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);

            bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
            if (bytes > bytestream2_get_bytes_left(&gb)) {
                av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
                ret = AVERROR(EINVAL);
                goto end;
            } else
                bytestream2_seek(&gb, bytes, SEEK_CUR);

            av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);
            s->codebook = 0;

            /* Copy last line of coefficients if odd height */
            if (highpass_height & 1) {
                memcpy(&coeff_data[highpass_height * highpass_stride],
                       &coeff_data[(highpass_height - 1) * highpass_stride],
                       highpass_stride * sizeof(*coeff_data));
            }
        }
    }

    if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
        s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
        ret = AVERROR(EINVAL);
        goto end;
    }

    if (!got_buffer) {
        av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
        ret = AVERROR(EINVAL);
        goto end;
    }

    planes = av_pix_fmt_count_planes(avctx->pix_fmt);
    if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
        if (!s->progressive)
            return AVERROR_INVALIDDATA;
        planes = 4;
    }
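
    /* Inverse wavelet transform, one plane at a time: levels 1 and 2 are
     * reconstructed into the lowpass subband buffer (vertical filtering into
     * the l_h[] scratch rows, then horizontal filtering), while level 3
     * writes straight into the output picture. */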
    for (plane = 0; plane < planes && !ret; plane++) {
        /* level 1 */
        int lowpass_height = s->plane[plane].band[0][0].height;
        int lowpass_width = s->plane[plane].band[0][0].width;
        int highpass_stride = s->plane[plane].band[0][1].stride;
        int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
        ptrdiff_t dst_linesize;
        int16_t *low, *high, *output, *dst;

        if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
            act_plane = 0;
            dst_linesize = pic->linesize[act_plane];
        } else {
            dst_linesize = pic->linesize[act_plane] / 2;
        }

        if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
            !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width) {
            av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

        low = s->plane[plane].subband[0];
        high = s->plane[plane].subband[2];
        output = s->plane[plane].l_h[0];
        for (i = 0; i < lowpass_width; i++) {
            vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low = s->plane[plane].subband[1];
        high = s->plane[plane].subband[3];
        output = s->plane[plane].l_h[1];
        for (i = 0; i < lowpass_width; i++) {
            // note the stride of "low" is highpass_stride
            vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low = s->plane[plane].l_h[0];
        high = s->plane[plane].l_h[1];
        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            horiz_filter(output, low, high, lowpass_width);
            low += lowpass_width;
            high += lowpass_width;
            output += lowpass_width * 2;
        }
        if (s->bpc == 12) {
            output = s->plane[plane].subband[0];
            for (i = 0; i < lowpass_height * 2; i++) {
                for (j = 0; j < lowpass_width * 2; j++)
                    output[j] *= 4;

                output += lowpass_width * 2;
            }
        }

        /* level 2 */
        lowpass_height = s->plane[plane].band[1][1].height;
        lowpass_width = s->plane[plane].band[1][1].width;
        highpass_stride = s->plane[plane].band[1][1].stride;

        if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
            !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width) {
            av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

        low = s->plane[plane].subband[0];
        high = s->plane[plane].subband[5];
        output = s->plane[plane].l_h[3];
        for (i = 0; i < lowpass_width; i++) {
            vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low = s->plane[plane].subband[4];
        high = s->plane[plane].subband[6];
        output = s->plane[plane].l_h[4];
        for (i = 0; i < lowpass_width; i++) {
            vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
            low++;
            high++;
            output++;
        }

        low = s->plane[plane].l_h[3];
        high = s->plane[plane].l_h[4];
        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            horiz_filter(output, low, high, lowpass_width);
            low += lowpass_width;
            high += lowpass_width;
            output += lowpass_width * 2;
        }

        output = s->plane[plane].subband[0];
        for (i = 0; i < lowpass_height * 2; i++) {
            for (j = 0; j < lowpass_width * 2; j++)
                output[j] *= 4;

            output += lowpass_width * 2;
        }

        /* level 3 */
        lowpass_height = s->plane[plane].band[2][1].height;
        lowpass_width = s->plane[plane].band[2][1].width;
        highpass_stride = s->plane[plane].band[2][1].stride;

        if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
            !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width) {
            av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
            ret = AVERROR(EINVAL);
            goto end;
        }

        av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
        if (s->progressive) {
            low = s->plane[plane].subband[0];
            high = s->plane[plane].subband[8];
            output = s->plane[plane].l_h[6];
            for (i = 0; i < lowpass_width; i++) {
                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
                low++;
                high++;
                output++;
            }

            low = s->plane[plane].subband[7];
            high = s->plane[plane].subband[9];
            output = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_width; i++) {
                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
                low++;
                high++;
                output++;
            }

            dst = (int16_t *)pic->data[act_plane];
            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                if (plane & 1)
                    dst++;
                if (plane > 1)
                    dst += pic->linesize[act_plane] >> 1;
            }
            low = s->plane[plane].l_h[6];
            high = s->plane[plane].l_h[7];

            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
                (lowpass_height * 2 > avctx->coded_height / 2 ||
                 lowpass_width * 2 > avctx->coded_width / 2 )
                ) {
                ret = AVERROR_INVALIDDATA;
                goto end;
            }

            for (i = 0; i < lowpass_height * 2; i++) {
                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
                    horiz_filter_clip_bayer(dst, low, high, lowpass_width, s->bpc);
                else
                    horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
                    process_alpha(dst, lowpass_width * 2);
                low += lowpass_width;
                high += lowpass_width;
                dst += dst_linesize;
            }
        } else {
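            /* Interlaced content: level 3 is reconstructed horizontally first,
             * then interlaced_vertical_filter() writes one even and one odd
             * field line per iteration directly into the picture. */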
            av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
            pic->interlaced_frame = 1;
            low = s->plane[plane].subband[0];
            high = s->plane[plane].subband[7];
            output = s->plane[plane].l_h[6];
            for (i = 0; i < lowpass_height; i++) {
                horiz_filter(output, low, high, lowpass_width);
                low += lowpass_width;
                high += lowpass_width;
                output += lowpass_width * 2;
            }

            low = s->plane[plane].subband[8];
            high = s->plane[plane].subband[9];
            output = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_height; i++) {
                horiz_filter(output, low, high, lowpass_width);
                low += lowpass_width;
                high += lowpass_width;
                output += lowpass_width * 2;
            }

            dst = (int16_t *)pic->data[act_plane];
            low = s->plane[plane].l_h[6];
            high = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_height; i++) {
                interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
                low += lowpass_width * 2;
                high += lowpass_width * 2;
                dst += pic->linesize[act_plane];
            }
        }
    }

    if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
        process_bayer(pic);
end:
    if (ret < 0)
        return ret;

    *got_frame = 1;
    return avpkt->size;
}

static av_cold int cfhd_close(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    free_buffers(s);

    ff_free_vlc(&s->vlc_9);
    ff_free_vlc(&s->vlc_18);

    return 0;
}

AVCodec ff_cfhd_decoder = {
    .name             = "cfhd",
    .long_name        = NULL_IF_CONFIG_SMALL("Cineform HD"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_CFHD,
    .priv_data_size   = sizeof(CFHDContext),
    .init             = cfhd_init,
    .close            = cfhd_close,
    .decode           = cfhd_decode,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};