You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1474 lines
60KB

  1. /*
  2. * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
  3. *
  4. * This file is part of FFmpeg.
  5. *
  6. * FFmpeg is free software; you can redistribute it and/or
  7. * modify it under the terms of the GNU Lesser General Public
  8. * License as published by the Free Software Foundation; either
  9. * version 2.1 of the License, or (at your option) any later version.
  10. *
  11. * FFmpeg is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  14. * Lesser General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU Lesser General Public
  17. * License along with FFmpeg; if not, write to the Free Software
  18. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  19. */
  20. /**
  21. * @file
  22. * Cineform HD video decoder
  23. */
  24. #include "libavutil/attributes.h"
  25. #include "libavutil/buffer.h"
  26. #include "libavutil/common.h"
  27. #include "libavutil/imgutils.h"
  28. #include "libavutil/intreadwrite.h"
  29. #include "libavutil/opt.h"
  30. #include "avcodec.h"
  31. #include "bytestream.h"
  32. #include "get_bits.h"
  33. #include "internal.h"
  34. #include "thread.h"
  35. #include "cfhd.h"
  36. #define ALPHA_COMPAND_DC_OFFSET 256
  37. #define ALPHA_COMPAND_GAIN 9400
  38. static av_cold int cfhd_init(AVCodecContext *avctx)
  39. {
  40. CFHDContext *s = avctx->priv_data;
  41. s->avctx = avctx;
  42. for (int i = 0; i < 64; i++) {
  43. int val = i;
  44. if (val >= 40) {
  45. if (val >= 54) {
  46. val -= 54;
  47. val <<= 2;
  48. val += 54;
  49. }
  50. val -= 40;
  51. val <<= 2;
  52. val += 40;
  53. }
  54. s->lut[0][i] = val;
  55. }
  56. for (int i = 0; i < 256; i++)
  57. s->lut[1][i] = i + ((768LL * i * i * i) / (256 * 256 * 256));
  58. return ff_cfhd_init_vlcs(s);
  59. }
  60. static void init_plane_defaults(CFHDContext *s)
  61. {
  62. s->subband_num = 0;
  63. s->level = 0;
  64. s->subband_num_actual = 0;
  65. }
  66. static void init_peak_table_defaults(CFHDContext *s)
  67. {
  68. s->peak.level = 0;
  69. s->peak.offset = 0;
  70. memset(&s->peak.base, 0, sizeof(s->peak.base));
  71. }
  72. static void init_frame_defaults(CFHDContext *s)
  73. {
  74. s->coded_width = 0;
  75. s->coded_height = 0;
  76. s->coded_format = AV_PIX_FMT_YUV422P10;
  77. s->cropped_height = 0;
  78. s->bpc = 10;
  79. s->channel_cnt = 3;
  80. s->subband_cnt = SUBBAND_COUNT;
  81. s->channel_num = 0;
  82. s->lowpass_precision = 16;
  83. s->quantisation = 1;
  84. s->codebook = 0;
  85. s->difference_coding = 0;
  86. s->frame_type = 0;
  87. s->sample_type = 0;
  88. if (s->transform_type != 2)
  89. s->transform_type = -1;
  90. init_plane_defaults(s);
  91. init_peak_table_defaults(s);
  92. }
  93. static inline int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
  94. {
  95. if (codebook == 0 || codebook == 1) {
  96. return s->lut[codebook][abs(level)] * FFSIGN(level) * quantisation;
  97. } else
  98. return level * quantisation;
  99. }
  100. static inline void difference_coding(int16_t *band, int width, int height)
  101. {
  102. int i,j;
  103. for (i = 0; i < height; i++) {
  104. for (j = 1; j < width; j++) {
  105. band[j] += band[j-1];
  106. }
  107. band += width;
  108. }
  109. }
  110. static inline void peak_table(int16_t *band, Peak *peak, int length)
  111. {
  112. int i;
  113. for (i = 0; i < length; i++)
  114. if (abs(band[i]) > peak->level)
  115. band[i] = bytestream2_get_le16(&peak->base);
  116. }
  117. static inline void process_alpha(int16_t *alpha, int width)
  118. {
  119. int i, channel;
  120. for (i = 0; i < width; i++) {
  121. channel = alpha[i];
  122. channel -= ALPHA_COMPAND_DC_OFFSET;
  123. channel <<= 3;
  124. channel *= ALPHA_COMPAND_GAIN;
  125. channel >>= 16;
  126. channel = av_clip_uintp2(channel, 12);
  127. alpha[i] = channel;
  128. }
  129. }
/* Convert the four decoded CineForm bayer components back into an RGGB
 * mosaic in place, scaling samples from bpc bits up to 16 bits. */
static inline void process_bayer(AVFrame *frame, int bpc)
{
    const int linesize = frame->linesize[0];
    uint16_t *r  = (uint16_t *)frame->data[0];
    uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);                  /* one sample right */
    uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]); /* one line down */
    uint16_t *b  = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
    const int mid    = 1 << (bpc - 1);  /* zero point of the difference channels */
    const int factor = 1 << (16 - bpc); /* scale factor from bpc bits to 16 bits */

    for (int y = 0; y < frame->height >> 1; y++) {
        for (int x = 0; x < frame->width; x += 2) {
            int R, G1, G2, B;
            int g, rg, bg, gd;

            /* The stored samples are presumably green plus red/blue/green
             * differences centered on mid — TODO confirm against the
             * CineForm bayer encoding spec. */
            g  = r[x];
            rg = g1[x];
            bg = g2[x];
            gd = b[x];
            gd -= mid;

            R  = (rg - mid) * 2 + g;
            G1 = g + gd;
            G2 = g - gd;
            B  = (bg - mid) * 2 + g;

            R  = av_clip_uintp2(R  * factor, 16);
            G1 = av_clip_uintp2(G1 * factor, 16);
            G2 = av_clip_uintp2(G2 * factor, 16);
            B  = av_clip_uintp2(B  * factor, 16);

            r[x]  = R;
            g1[x] = G1;
            g2[x] = G2;
            b[x]  = B;
        }

        /* linesize is in bytes; adding it to a uint16_t pointer advances
         * 2 * linesize bytes, i.e. one 2x2 bayer block row (two lines). */
        r  += linesize;
        g1 += linesize;
        g2 += linesize;
        b  += linesize;
    }
}
  167. static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
  168. int width, int linesize, int plane)
  169. {
  170. int i;
  171. int16_t even, odd;
  172. for (i = 0; i < width; i++) {
  173. even = (low[i] - high[i])/2;
  174. odd = (low[i] + high[i])/2;
  175. output[i] = av_clip_uintp2(even, 10);
  176. output[i + linesize] = av_clip_uintp2(odd, 10);
  177. }
  178. }
  179. static inline void inverse_temporal_filter(int16_t *low, int16_t *high, int width)
  180. {
  181. for (int i = 0; i < width; i++) {
  182. int even = (low[i] - high[i]) / 2;
  183. int odd = (low[i] + high[i]) / 2;
  184. low[i] = even;
  185. high[i] = odd;
  186. }
  187. }
  188. static void free_buffers(CFHDContext *s)
  189. {
  190. int i, j;
  191. for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
  192. Plane *p = &s->plane[i];
  193. av_freep(&s->plane[i].idwt_buf);
  194. av_freep(&s->plane[i].idwt_tmp);
  195. s->plane[i].idwt_size = 0;
  196. for (j = 0; j < SUBBAND_COUNT_3D; j++)
  197. s->plane[i].subband[j] = NULL;
  198. for (j = 0; j < 10; j++)
  199. s->plane[i].l_h[j] = NULL;
  200. for (j = 0; j < DWT_LEVELS_3D; j++)
  201. p->band[j][0].read_ok =
  202. p->band[j][1].read_ok =
  203. p->band[j][2].read_ok =
  204. p->band[j][3].read_ok = 0;
  205. }
  206. s->a_height = 0;
  207. s->a_width = 0;
  208. s->a_transform_type = INT_MIN;
  209. }
/* (Re)allocate the per-plane inverse-DWT buffers for the current coded
 * dimensions, pixel format and transform type, and carve them up into the
 * subband and intermediate (l_h) pointers.  On success, caches the
 * geometry in s->a_* so the next frame can reuse the buffers.
 * Returns 0 on success, a negative AVERROR code on failure. */
static int alloc_buffers(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;
    int i, j, ret, planes, bayer = 0;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
        return ret;
    avctx->pix_fmt = s->coded_format;

    ff_cfhddsp_init(&s->dsp, s->bpc, avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);
    /* Bayer frames are coded as four half-resolution channels. */
    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        planes = 4;
        chroma_x_shift = 1;
        chroma_y_shift = 1;
        bayer = 1;
    }

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        int width  = (i || bayer) ? s->coded_width >> chroma_x_shift : s->coded_width;
        int height = (i || bayer) ? s->coded_height >> chroma_y_shift : s->coded_height;
        ptrdiff_t stride = (FFALIGN(width / 8, 8) + 64) * 8;

        if (chroma_y_shift && !bayer)
            height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width  = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        /* Band dimensions at each decomposition level: w8/h8 is the
         * smallest (1/8) level; each level up doubles both axes. */
        w8 = FFALIGN(s->plane[i].width / 8, 8) + 64;
        h8 = FFALIGN(height, 8) / 8;
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        /* Transform type 0 (2D) needs one frame's worth of coefficients;
         * other types allocate twice as much (a second temporal frame). */
        if (s->transform_type == 0) {
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        } else {
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride * 2;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        }
        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        /* Carve the coefficient buffer into subband pointers.  Subband 0
         * is the lowpass band; bands 1-3 sit at the 1/8 level, 4-6 at the
         * 1/4 level, and so on. */
        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        if (s->transform_type == 0) {
            s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
        } else {
            /* 3D transform: bands 8-13 live in the second (temporal)
             * half of the buffer, starting at frame2. */
            int16_t *frame2 =
            s->plane[i].subband[7] = s->plane[i].idwt_buf + 4 * w2 * h2;
            s->plane[i].subband[8] = frame2 + 2 * w4 * h4;
            s->plane[i].subband[9] = frame2 + 1 * w4 * h4;
            s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
            s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
            s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
            s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
            s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
        }

        /* Record the allocated (maximum) band dimensions per DWT level. */
        if (s->transform_type == 0) {
            for (j = 0; j < DWT_LEVELS; j++) {
                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width = w8 << j;
                    s->plane[i].band[j][k].a_height = h8 << j;
                }
            }
        } else {
            for (j = 0; j < DWT_LEVELS_3D; j++) {
                /* Levels 0, 1-2 and 3+ share the 1/8, 1/4 and 1/2 sizes. */
                int t = j < 1 ? 0 : (j < 3 ? 1 : 2);
                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width = w8 << t;
                    s->plane[i].band[j][k].a_height = h8 << t;
                }
            }
        }

        /* ll2 and ll1 commented out because they are done in-place */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
        if (s->transform_type != 0) {
            int16_t *frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;

            s->plane[i].l_h[8] = frame2;
            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
        }
    }

    /* Cache the geometry so the next frame can skip reallocation. */
    s->a_transform_type = s->transform_type;
    s->a_height = s->coded_height;
    s->a_width = s->coded_width;
    s->a_format = s->coded_format;
    return 0;
}
  323. static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
  324. AVPacket *avpkt)
  325. {
  326. CFHDContext *s = avctx->priv_data;
  327. CFHDDSPContext *dsp = &s->dsp;
  328. GetByteContext gb;
  329. ThreadFrame frame = { .f = data };
  330. AVFrame *pic = data;
  331. int ret = 0, i, j, plane, got_buffer = 0;
  332. int16_t *coeff_data;
  333. init_frame_defaults(s);
  334. s->planes = av_pix_fmt_count_planes(s->coded_format);
  335. bytestream2_init(&gb, avpkt->data, avpkt->size);
  336. while (bytestream2_get_bytes_left(&gb) >= 4) {
  337. /* Bit weird but implement the tag parsing as the spec says */
  338. uint16_t tagu = bytestream2_get_be16(&gb);
  339. int16_t tag = (int16_t)tagu;
  340. int8_t tag8 = (int8_t)(tagu >> 8);
  341. uint16_t abstag = abs(tag);
  342. int8_t abs_tag8 = abs(tag8);
  343. uint16_t data = bytestream2_get_be16(&gb);
  344. if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
  345. av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
  346. } else if (tag == SampleFlags) {
  347. av_log(avctx, AV_LOG_DEBUG, "Progressive? %"PRIu16"\n", data);
  348. s->progressive = data & 0x0001;
  349. } else if (tag == FrameType) {
  350. s->frame_type = data;
  351. av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
  352. } else if (abstag == VersionMajor) {
  353. av_log(avctx, AV_LOG_DEBUG, "Version major %"PRIu16"\n", data);
  354. } else if (abstag == VersionMinor) {
  355. av_log(avctx, AV_LOG_DEBUG, "Version minor %"PRIu16"\n", data);
  356. } else if (abstag == VersionRevision) {
  357. av_log(avctx, AV_LOG_DEBUG, "Version revision %"PRIu16"\n", data);
  358. } else if (abstag == VersionEdit) {
  359. av_log(avctx, AV_LOG_DEBUG, "Version edit %"PRIu16"\n", data);
  360. } else if (abstag == Version) {
  361. av_log(avctx, AV_LOG_DEBUG, "Version %"PRIu16"\n", data);
  362. } else if (tag == ImageWidth) {
  363. av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
  364. s->coded_width = data;
  365. } else if (tag == ImageHeight) {
  366. av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
  367. s->coded_height = data;
  368. } else if (tag == ChannelCount) {
  369. av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
  370. s->channel_cnt = data;
  371. if (data > 4) {
  372. av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
  373. ret = AVERROR_PATCHWELCOME;
  374. goto end;
  375. }
  376. } else if (tag == SubbandCount) {
  377. av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
  378. if (data != SUBBAND_COUNT && data != SUBBAND_COUNT_3D) {
  379. av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
  380. ret = AVERROR_PATCHWELCOME;
  381. goto end;
  382. }
  383. } else if (tag == ChannelNumber) {
  384. s->channel_num = data;
  385. av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
  386. if (s->channel_num >= s->planes) {
  387. av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
  388. ret = AVERROR(EINVAL);
  389. goto end;
  390. }
  391. init_plane_defaults(s);
  392. } else if (tag == SubbandNumber) {
  393. if (s->subband_num != 0 && data == 1 && (s->transform_type == 0 || s->transform_type == 2)) // hack
  394. s->level++;
  395. av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
  396. s->subband_num = data;
  397. if ((s->transform_type == 0 && s->level >= DWT_LEVELS) ||
  398. (s->transform_type == 2 && s->level >= DWT_LEVELS_3D)) {
  399. av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
  400. ret = AVERROR(EINVAL);
  401. goto end;
  402. }
  403. if (s->subband_num > 3) {
  404. av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
  405. ret = AVERROR(EINVAL);
  406. goto end;
  407. }
  408. } else if (tag == SubbandBand) {
  409. av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
  410. if ((s->transform_type == 0 && data >= SUBBAND_COUNT) ||
  411. (s->transform_type == 2 && data >= SUBBAND_COUNT_3D && data != 255)) {
  412. av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
  413. ret = AVERROR(EINVAL);
  414. goto end;
  415. }
  416. if (s->transform_type == 0 || s->transform_type == 2)
  417. s->subband_num_actual = data;
  418. else
  419. av_log(avctx, AV_LOG_WARNING, "Ignoring subband num actual %"PRIu16"\n", data);
  420. } else if (tag == LowpassPrecision)
  421. av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
  422. else if (tag == Quantization) {
  423. s->quantisation = data;
  424. av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
  425. } else if (tag == PrescaleTable) {
  426. for (i = 0; i < 8; i++)
  427. s->prescale_table[i] = (data >> (14 - i * 2)) & 0x3;
  428. av_log(avctx, AV_LOG_DEBUG, "Prescale table: %x\n", data);
  429. } else if (tag == BandEncoding) {
  430. if (!data || data > 5) {
  431. av_log(avctx, AV_LOG_ERROR, "Invalid band encoding\n");
  432. ret = AVERROR(EINVAL);
  433. goto end;
  434. }
  435. s->band_encoding = data;
  436. av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d : %x\n", s->subband_num_actual, data);
  437. } else if (tag == LowpassWidth) {
  438. av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
  439. s->plane[s->channel_num].band[0][0].width = data;
  440. s->plane[s->channel_num].band[0][0].stride = data;
  441. } else if (tag == LowpassHeight) {
  442. av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
  443. s->plane[s->channel_num].band[0][0].height = data;
  444. } else if (tag == SampleType) {
  445. s->sample_type = data;
  446. av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
  447. } else if (tag == TransformType) {
  448. if (data > 2) {
  449. av_log(avctx, AV_LOG_ERROR, "Invalid transform type\n");
  450. ret = AVERROR(EINVAL);
  451. goto end;
  452. } else if (data == 1) {
  453. av_log(avctx, AV_LOG_ERROR, "unsupported transform type\n");
  454. ret = AVERROR_PATCHWELCOME;
  455. goto end;
  456. }
  457. if (s->transform_type == -1) {
  458. s->transform_type = data;
  459. av_log(avctx, AV_LOG_DEBUG, "Transform type %"PRIu16"\n", data);
  460. } else {
  461. av_log(avctx, AV_LOG_DEBUG, "Ignoring additional transform type %"PRIu16"\n", data);
  462. }
  463. } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
  464. if (abstag == 0x4001)
  465. s->peak.level = 0;
  466. av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
  467. bytestream2_skipu(&gb, data * 4);
  468. } else if (tag == FrameIndex) {
  469. av_log(avctx, AV_LOG_DEBUG, "Frame index %"PRIu16"\n", data);
  470. s->frame_index = data;
  471. } else if (tag == SampleIndexTable) {
  472. av_log(avctx, AV_LOG_DEBUG, "Sample index table - skipping %i values\n", data);
  473. if (data > bytestream2_get_bytes_left(&gb) / 4) {
  474. av_log(avctx, AV_LOG_ERROR, "too many values (%d)\n", data);
  475. ret = AVERROR_INVALIDDATA;
  476. goto end;
  477. }
  478. for (i = 0; i < data; i++) {
  479. uint32_t offset = bytestream2_get_be32(&gb);
  480. av_log(avctx, AV_LOG_DEBUG, "Offset = %"PRIu32"\n", offset);
  481. }
  482. } else if (tag == HighpassWidth) {
  483. av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
  484. if (data < 3) {
  485. av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
  486. ret = AVERROR(EINVAL);
  487. goto end;
  488. }
  489. s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
  490. s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
  491. } else if (tag == HighpassHeight) {
  492. av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
  493. if (data < 3) {
  494. av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
  495. ret = AVERROR(EINVAL);
  496. goto end;
  497. }
  498. s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
  499. } else if (tag == BandWidth) {
  500. av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
  501. if (data < 3) {
  502. av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
  503. ret = AVERROR(EINVAL);
  504. goto end;
  505. }
  506. s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
  507. s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
  508. } else if (tag == BandHeight) {
  509. av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
  510. if (data < 3) {
  511. av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
  512. ret = AVERROR(EINVAL);
  513. goto end;
  514. }
  515. s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
  516. } else if (tag == InputFormat) {
  517. av_log(avctx, AV_LOG_DEBUG, "Input format %i\n", data);
  518. if (s->coded_format == AV_PIX_FMT_NONE ||
  519. s->coded_format == AV_PIX_FMT_YUV422P10) {
  520. if (data >= 100 && data <= 105) {
  521. s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
  522. } else if (data >= 122 && data <= 128) {
  523. s->coded_format = AV_PIX_FMT_GBRP12;
  524. } else if (data == 30) {
  525. s->coded_format = AV_PIX_FMT_GBRAP12;
  526. } else {
  527. s->coded_format = AV_PIX_FMT_YUV422P10;
  528. }
  529. s->planes = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 4 : av_pix_fmt_count_planes(s->coded_format);
  530. }
  531. } else if (tag == BandCodingFlags) {
  532. s->codebook = data & 0xf;
  533. s->difference_coding = (data >> 4) & 1;
  534. av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
  535. } else if (tag == Precision) {
  536. av_log(avctx, AV_LOG_DEBUG, "Precision %i\n", data);
  537. if (!(data == 10 || data == 12)) {
  538. av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
  539. ret = AVERROR(EINVAL);
  540. goto end;
  541. }
  542. avctx->bits_per_raw_sample = s->bpc = data;
  543. } else if (tag == EncodedFormat) {
  544. av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
  545. if (data == 1) {
  546. s->coded_format = AV_PIX_FMT_YUV422P10;
  547. } else if (data == 2) {
  548. s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
  549. } else if (data == 3) {
  550. s->coded_format = AV_PIX_FMT_GBRP12;
  551. } else if (data == 4) {
  552. s->coded_format = AV_PIX_FMT_GBRAP12;
  553. } else {
  554. avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
  555. ret = AVERROR_PATCHWELCOME;
  556. goto end;
  557. }
  558. s->planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
  559. } else if (tag == -DisplayHeight) {
  560. av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
  561. s->cropped_height = data;
  562. } else if (tag == -PeakOffsetLow) {
  563. s->peak.offset &= ~0xffff;
  564. s->peak.offset |= (data & 0xffff);
  565. s->peak.base = gb;
  566. s->peak.level = 0;
  567. } else if (tag == -PeakOffsetHigh) {
  568. s->peak.offset &= 0xffff;
  569. s->peak.offset |= (data & 0xffffU)<<16;
  570. s->peak.base = gb;
  571. s->peak.level = 0;
  572. } else if (tag == -PeakLevel && s->peak.offset) {
  573. s->peak.level = data;
  574. if (s->peak.offset < 4 - bytestream2_tell(&s->peak.base) ||
  575. s->peak.offset > 4 + bytestream2_get_bytes_left(&s->peak.base)
  576. ) {
  577. ret = AVERROR_INVALIDDATA;
  578. goto end;
  579. }
  580. bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
  581. } else
  582. av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);
  583. if (tag == BitstreamMarker && data == 0xf0f &&
  584. s->coded_format != AV_PIX_FMT_NONE) {
  585. int lowpass_height = s->plane[s->channel_num].band[0][0].height;
  586. int lowpass_width = s->plane[s->channel_num].band[0][0].width;
  587. int factor = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 2 : 1;
  588. if (s->coded_width) {
  589. s->coded_width *= factor;
  590. }
  591. if (s->coded_height) {
  592. s->coded_height *= factor;
  593. }
  594. if (!s->a_width && !s->coded_width) {
  595. s->coded_width = lowpass_width * factor * 8;
  596. }
  597. if (!s->a_height && !s->coded_height) {
  598. s->coded_height = lowpass_height * factor * 8;
  599. }
  600. if (s->a_width && !s->coded_width)
  601. s->coded_width = s->a_width;
  602. if (s->a_height && !s->coded_height)
  603. s->coded_height = s->a_height;
  604. if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
  605. s->a_format != s->coded_format ||
  606. s->transform_type != s->a_transform_type) {
  607. free_buffers(s);
  608. if ((ret = alloc_buffers(avctx)) < 0) {
  609. free_buffers(s);
  610. return ret;
  611. }
  612. }
  613. ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
  614. if (ret < 0)
  615. return ret;
  616. if (s->cropped_height) {
  617. unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
  618. if (avctx->height < height)
  619. return AVERROR_INVALIDDATA;
  620. avctx->height = height;
  621. }
  622. frame.f->width =
  623. frame.f->height = 0;
  624. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  625. return ret;
  626. s->coded_width = 0;
  627. s->coded_height = 0;
  628. s->coded_format = AV_PIX_FMT_NONE;
  629. got_buffer = 1;
  630. } else if (tag == FrameIndex && data == 1 && s->sample_type == 1 && s->frame_type == 2) {
  631. frame.f->width =
  632. frame.f->height = 0;
  633. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  634. return ret;
  635. s->coded_width = 0;
  636. s->coded_height = 0;
  637. s->coded_format = AV_PIX_FMT_NONE;
  638. got_buffer = 1;
  639. }
  640. if (s->subband_num_actual == 255)
  641. goto finish;
  642. coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];
  643. /* Lowpass coefficients */
  644. if (tag == BitstreamMarker && data == 0xf0f) {
  645. int lowpass_height, lowpass_width, lowpass_a_height, lowpass_a_width;
  646. if (!s->a_width || !s->a_height) {
  647. ret = AVERROR_INVALIDDATA;
  648. goto end;
  649. }
  650. lowpass_height = s->plane[s->channel_num].band[0][0].height;
  651. lowpass_width = s->plane[s->channel_num].band[0][0].width;
  652. lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
  653. lowpass_a_width = s->plane[s->channel_num].band[0][0].a_width;
  654. if (lowpass_width < 3 ||
  655. lowpass_width > lowpass_a_width) {
  656. av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
  657. ret = AVERROR(EINVAL);
  658. goto end;
  659. }
  660. if (lowpass_height < 3 ||
  661. lowpass_height > lowpass_a_height) {
  662. av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
  663. ret = AVERROR(EINVAL);
  664. goto end;
  665. }
  666. if (!got_buffer) {
  667. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  668. ret = AVERROR(EINVAL);
  669. goto end;
  670. }
  671. if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
  672. lowpass_width * lowpass_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
  673. av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
  674. ret = AVERROR(EINVAL);
  675. goto end;
  676. }
  677. av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
  678. for (i = 0; i < lowpass_height; i++) {
  679. for (j = 0; j < lowpass_width; j++)
  680. coeff_data[j] = bytestream2_get_be16u(&gb);
  681. coeff_data += lowpass_width;
  682. }
  683. /* Align to mod-4 position to continue reading tags */
  684. bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);
  685. /* Copy last line of coefficients if odd height */
  686. if (lowpass_height & 1) {
  687. memcpy(&coeff_data[lowpass_height * lowpass_width],
  688. &coeff_data[(lowpass_height - 1) * lowpass_width],
  689. lowpass_width * sizeof(*coeff_data));
  690. }
  691. s->plane[s->channel_num].band[0][0].read_ok = 1;
  692. av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
  693. }
  694. av_assert0(s->subband_num_actual != 255);
  695. if (tag == BandHeader || tag == BandSecondPass) {
  696. int highpass_height, highpass_width, highpass_a_width, highpass_a_height, highpass_stride, a_expected;
  697. int expected;
  698. int level, run, coeff;
  699. int count = 0, bytes;
  700. if (!s->a_width || !s->a_height) {
  701. ret = AVERROR_INVALIDDATA;
  702. goto end;
  703. }
  704. highpass_height = s->plane[s->channel_num].band[s->level][s->subband_num].height;
  705. highpass_width = s->plane[s->channel_num].band[s->level][s->subband_num].width;
  706. highpass_a_width = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
  707. highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
  708. highpass_stride = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
  709. a_expected = highpass_a_height * highpass_a_width;
  710. if (!got_buffer) {
  711. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  712. ret = AVERROR(EINVAL);
  713. goto end;
  714. }
  715. if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
  716. av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
  717. ret = AVERROR(EINVAL);
  718. goto end;
  719. }
  720. expected = highpass_height * highpass_stride;
  721. av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);
  722. ret = init_get_bits8(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb));
  723. if (ret < 0)
  724. goto end;
  725. {
  726. OPEN_READER(re, &s->gb);
  727. const int lossless = s->band_encoding == 5;
  728. if (s->codebook == 0 && s->transform_type == 2 && s->subband_num_actual == 7)
  729. s->codebook = 1;
  730. if (!s->codebook) {
  731. while (1) {
  732. UPDATE_CACHE(re, &s->gb);
  733. GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
  734. VLC_BITS, 3, 1);
  735. /* escape */
  736. if (level == 64)
  737. break;
  738. count += run;
  739. if (count > expected)
  740. break;
  741. if (!lossless)
  742. coeff = dequant_and_decompand(s, level, s->quantisation, 0);
  743. else
  744. coeff = level;
  745. if (tag == BandSecondPass) {
  746. const uint16_t q = s->quantisation;
  747. for (i = 0; i < run; i++) {
  748. *coeff_data |= coeff * 256;
  749. *coeff_data++ *= q;
  750. }
  751. } else {
  752. for (i = 0; i < run; i++)
  753. *coeff_data++ = coeff;
  754. }
  755. }
  756. } else {
  757. while (1) {
  758. UPDATE_CACHE(re, &s->gb);
  759. GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
  760. VLC_BITS, 3, 1);
  761. /* escape */
  762. if (level == 255 && run == 2)
  763. break;
  764. count += run;
  765. if (count > expected)
  766. break;
  767. if (!lossless)
  768. coeff = dequant_and_decompand(s, level, s->quantisation, s->codebook);
  769. else
  770. coeff = level;
  771. if (tag == BandSecondPass) {
  772. const uint16_t q = s->quantisation;
  773. for (i = 0; i < run; i++) {
  774. *coeff_data |= coeff * 256;
  775. *coeff_data++ *= q;
  776. }
  777. } else {
  778. for (i = 0; i < run; i++)
  779. *coeff_data++ = coeff;
  780. }
  781. }
  782. }
  783. CLOSE_READER(re, &s->gb);
  784. }
  785. if (count > expected) {
  786. av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
  787. ret = AVERROR(EINVAL);
  788. goto end;
  789. }
  790. if (s->peak.level)
  791. peak_table(coeff_data - count, &s->peak, count);
  792. if (s->difference_coding)
  793. difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);
  794. bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
  795. if (bytes > bytestream2_get_bytes_left(&gb)) {
  796. av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
  797. ret = AVERROR(EINVAL);
  798. goto end;
  799. } else
  800. bytestream2_seek(&gb, bytes, SEEK_CUR);
  801. av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);
  802. s->plane[s->channel_num].band[s->level][s->subband_num].read_ok = 1;
  803. finish:
  804. if (s->subband_num_actual != 255)
  805. s->codebook = 0;
  806. }
  807. }
  808. s->planes = av_pix_fmt_count_planes(avctx->pix_fmt);
  809. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  810. s->progressive = 1;
  811. s->planes = 4;
  812. }
  813. ff_thread_finish_setup(avctx);
  814. if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
  815. s->a_transform_type == INT_MIN ||
  816. s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
  817. av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
  818. ret = AVERROR(EINVAL);
  819. goto end;
  820. }
  821. if (!got_buffer) {
  822. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  823. ret = AVERROR(EINVAL);
  824. goto end;
  825. }
  826. for (plane = 0; plane < s->planes; plane++) {
  827. int o, level;
  828. for (level = 0; level < (s->transform_type == 0 ? DWT_LEVELS : DWT_LEVELS_3D) ; level++) {
  829. if (s->transform_type == 2)
  830. if (level == 2 || level == 5)
  831. continue;
  832. for (o = !!level; o < 4 ; o++) {
  833. if (!s->plane[plane].band[level][o].read_ok) {
  834. ret = AVERROR_INVALIDDATA;
  835. goto end;
  836. }
  837. }
  838. }
  839. }
  840. if (s->transform_type == 0 && s->sample_type != 1) {
  841. for (plane = 0; plane < s->planes && !ret; plane++) {
  842. /* level 1 */
  843. int lowpass_height = s->plane[plane].band[0][0].height;
  844. int output_stride = s->plane[plane].band[0][0].a_width;
  845. int lowpass_width = s->plane[plane].band[0][0].width;
  846. int highpass_stride = s->plane[plane].band[0][1].stride;
  847. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  848. ptrdiff_t dst_linesize;
  849. int16_t *low, *high, *output, *dst;
  850. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  851. act_plane = 0;
  852. dst_linesize = pic->linesize[act_plane];
  853. } else {
  854. dst_linesize = pic->linesize[act_plane] / 2;
  855. }
  856. if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
  857. !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
  858. lowpass_width < 3 || lowpass_height < 3) {
  859. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  860. ret = AVERROR(EINVAL);
  861. goto end;
  862. }
  863. av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  864. low = s->plane[plane].subband[0];
  865. high = s->plane[plane].subband[2];
  866. output = s->plane[plane].l_h[0];
  867. dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
  868. low = s->plane[plane].subband[1];
  869. high = s->plane[plane].subband[3];
  870. output = s->plane[plane].l_h[1];
  871. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  872. low = s->plane[plane].l_h[0];
  873. high = s->plane[plane].l_h[1];
  874. output = s->plane[plane].subband[0];
  875. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  876. if (s->bpc == 12) {
  877. output = s->plane[plane].subband[0];
  878. for (i = 0; i < lowpass_height * 2; i++) {
  879. for (j = 0; j < lowpass_width * 2; j++)
  880. output[j] *= 4;
  881. output += output_stride * 2;
  882. }
  883. }
  884. /* level 2 */
  885. lowpass_height = s->plane[plane].band[1][1].height;
  886. output_stride = s->plane[plane].band[1][1].a_width;
  887. lowpass_width = s->plane[plane].band[1][1].width;
  888. highpass_stride = s->plane[plane].band[1][1].stride;
  889. if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
  890. !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
  891. lowpass_width < 3 || lowpass_height < 3) {
  892. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  893. ret = AVERROR(EINVAL);
  894. goto end;
  895. }
  896. av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  897. low = s->plane[plane].subband[0];
  898. high = s->plane[plane].subband[5];
  899. output = s->plane[plane].l_h[3];
  900. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  901. low = s->plane[plane].subband[4];
  902. high = s->plane[plane].subband[6];
  903. output = s->plane[plane].l_h[4];
  904. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  905. low = s->plane[plane].l_h[3];
  906. high = s->plane[plane].l_h[4];
  907. output = s->plane[plane].subband[0];
  908. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  909. output = s->plane[plane].subband[0];
  910. for (i = 0; i < lowpass_height * 2; i++) {
  911. for (j = 0; j < lowpass_width * 2; j++)
  912. output[j] *= 4;
  913. output += output_stride * 2;
  914. }
  915. /* level 3 */
  916. lowpass_height = s->plane[plane].band[2][1].height;
  917. output_stride = s->plane[plane].band[2][1].a_width;
  918. lowpass_width = s->plane[plane].band[2][1].width;
  919. highpass_stride = s->plane[plane].band[2][1].stride;
  920. if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
  921. !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width ||
  922. lowpass_height < 3 || lowpass_width < 3 || lowpass_width * 2 > s->plane[plane].width) {
  923. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  924. ret = AVERROR(EINVAL);
  925. goto end;
  926. }
  927. av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  928. if (s->progressive) {
  929. low = s->plane[plane].subband[0];
  930. high = s->plane[plane].subband[8];
  931. output = s->plane[plane].l_h[6];
  932. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  933. low = s->plane[plane].subband[7];
  934. high = s->plane[plane].subband[9];
  935. output = s->plane[plane].l_h[7];
  936. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  937. dst = (int16_t *)pic->data[act_plane];
  938. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  939. if (plane & 1)
  940. dst++;
  941. if (plane > 1)
  942. dst += pic->linesize[act_plane] >> 1;
  943. }
  944. low = s->plane[plane].l_h[6];
  945. high = s->plane[plane].l_h[7];
  946. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  947. (lowpass_height * 2 > avctx->coded_height / 2 ||
  948. lowpass_width * 2 > avctx->coded_width / 2 )
  949. ) {
  950. ret = AVERROR_INVALIDDATA;
  951. goto end;
  952. }
  953. for (i = 0; i < s->plane[act_plane].height; i++) {
  954. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  955. if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
  956. process_alpha(dst, lowpass_width * 2);
  957. low += output_stride;
  958. high += output_stride;
  959. dst += dst_linesize;
  960. }
  961. } else {
  962. av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
  963. pic->interlaced_frame = 1;
  964. low = s->plane[plane].subband[0];
  965. high = s->plane[plane].subband[7];
  966. output = s->plane[plane].l_h[6];
  967. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  968. low = s->plane[plane].subband[8];
  969. high = s->plane[plane].subband[9];
  970. output = s->plane[plane].l_h[7];
  971. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  972. dst = (int16_t *)pic->data[act_plane];
  973. low = s->plane[plane].l_h[6];
  974. high = s->plane[plane].l_h[7];
  975. for (i = 0; i < s->plane[act_plane].height / 2; i++) {
  976. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  977. low += output_stride * 2;
  978. high += output_stride * 2;
  979. dst += pic->linesize[act_plane];
  980. }
  981. }
  982. }
  983. } else if (s->transform_type == 2 && (avctx->internal->is_copy || s->frame_index == 1 || s->sample_type != 1)) {
  984. for (plane = 0; plane < s->planes && !ret; plane++) {
  985. int lowpass_height = s->plane[plane].band[0][0].height;
  986. int output_stride = s->plane[plane].band[0][0].a_width;
  987. int lowpass_width = s->plane[plane].band[0][0].width;
  988. int highpass_stride = s->plane[plane].band[0][1].stride;
  989. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  990. int16_t *low, *high, *output, *dst;
  991. ptrdiff_t dst_linesize;
  992. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  993. act_plane = 0;
  994. dst_linesize = pic->linesize[act_plane];
  995. } else {
  996. dst_linesize = pic->linesize[act_plane] / 2;
  997. }
  998. if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
  999. !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
  1000. lowpass_width < 3 || lowpass_height < 3) {
  1001. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1002. ret = AVERROR(EINVAL);
  1003. goto end;
  1004. }
  1005. av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  1006. low = s->plane[plane].subband[0];
  1007. high = s->plane[plane].subband[2];
  1008. output = s->plane[plane].l_h[0];
  1009. dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
  1010. low = s->plane[plane].subband[1];
  1011. high = s->plane[plane].subband[3];
  1012. output = s->plane[plane].l_h[1];
  1013. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1014. low = s->plane[plane].l_h[0];
  1015. high = s->plane[plane].l_h[1];
  1016. output = s->plane[plane].l_h[7];
  1017. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  1018. if (s->bpc == 12) {
  1019. output = s->plane[plane].l_h[7];
  1020. for (i = 0; i < lowpass_height * 2; i++) {
  1021. for (j = 0; j < lowpass_width * 2; j++)
  1022. output[j] *= 4;
  1023. output += output_stride * 2;
  1024. }
  1025. }
  1026. lowpass_height = s->plane[plane].band[1][1].height;
  1027. output_stride = s->plane[plane].band[1][1].a_width;
  1028. lowpass_width = s->plane[plane].band[1][1].width;
  1029. highpass_stride = s->plane[plane].band[1][1].stride;
  1030. if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
  1031. !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
  1032. lowpass_width < 3 || lowpass_height < 3) {
  1033. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1034. ret = AVERROR(EINVAL);
  1035. goto end;
  1036. }
  1037. av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  1038. low = s->plane[plane].l_h[7];
  1039. high = s->plane[plane].subband[5];
  1040. output = s->plane[plane].l_h[3];
  1041. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1042. low = s->plane[plane].subband[4];
  1043. high = s->plane[plane].subband[6];
  1044. output = s->plane[plane].l_h[4];
  1045. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1046. low = s->plane[plane].l_h[3];
  1047. high = s->plane[plane].l_h[4];
  1048. output = s->plane[plane].l_h[7];
  1049. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  1050. output = s->plane[plane].l_h[7];
  1051. for (i = 0; i < lowpass_height * 2; i++) {
  1052. for (j = 0; j < lowpass_width * 2; j++)
  1053. output[j] *= 4;
  1054. output += output_stride * 2;
  1055. }
  1056. low = s->plane[plane].subband[7];
  1057. high = s->plane[plane].subband[9];
  1058. output = s->plane[plane].l_h[3];
  1059. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1060. low = s->plane[plane].subband[8];
  1061. high = s->plane[plane].subband[10];
  1062. output = s->plane[plane].l_h[4];
  1063. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1064. low = s->plane[plane].l_h[3];
  1065. high = s->plane[plane].l_h[4];
  1066. output = s->plane[plane].l_h[9];
  1067. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  1068. lowpass_height = s->plane[plane].band[4][1].height;
  1069. output_stride = s->plane[plane].band[4][1].a_width;
  1070. lowpass_width = s->plane[plane].band[4][1].width;
  1071. highpass_stride = s->plane[plane].band[4][1].stride;
  1072. av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  1073. if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
  1074. !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
  1075. lowpass_width < 3 || lowpass_height < 3) {
  1076. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1077. ret = AVERROR(EINVAL);
  1078. goto end;
  1079. }
  1080. low = s->plane[plane].l_h[7];
  1081. high = s->plane[plane].l_h[9];
  1082. output = s->plane[plane].l_h[7];
  1083. for (i = 0; i < lowpass_height; i++) {
  1084. inverse_temporal_filter(low, high, lowpass_width);
  1085. low += output_stride;
  1086. high += output_stride;
  1087. }
  1088. if (s->progressive) {
  1089. low = s->plane[plane].l_h[7];
  1090. high = s->plane[plane].subband[15];
  1091. output = s->plane[plane].l_h[6];
  1092. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1093. low = s->plane[plane].subband[14];
  1094. high = s->plane[plane].subband[16];
  1095. output = s->plane[plane].l_h[7];
  1096. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1097. low = s->plane[plane].l_h[9];
  1098. high = s->plane[plane].subband[12];
  1099. output = s->plane[plane].l_h[8];
  1100. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1101. low = s->plane[plane].subband[11];
  1102. high = s->plane[plane].subband[13];
  1103. output = s->plane[plane].l_h[9];
  1104. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1105. if (s->sample_type == 1)
  1106. continue;
  1107. dst = (int16_t *)pic->data[act_plane];
  1108. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1109. if (plane & 1)
  1110. dst++;
  1111. if (plane > 1)
  1112. dst += pic->linesize[act_plane] >> 1;
  1113. }
  1114. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  1115. (lowpass_height * 2 > avctx->coded_height / 2 ||
  1116. lowpass_width * 2 > avctx->coded_width / 2 )
  1117. ) {
  1118. ret = AVERROR_INVALIDDATA;
  1119. goto end;
  1120. }
  1121. low = s->plane[plane].l_h[6];
  1122. high = s->plane[plane].l_h[7];
  1123. for (i = 0; i < s->plane[act_plane].height; i++) {
  1124. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  1125. low += output_stride;
  1126. high += output_stride;
  1127. dst += dst_linesize;
  1128. }
  1129. } else {
  1130. pic->interlaced_frame = 1;
  1131. low = s->plane[plane].l_h[7];
  1132. high = s->plane[plane].subband[14];
  1133. output = s->plane[plane].l_h[6];
  1134. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1135. low = s->plane[plane].subband[15];
  1136. high = s->plane[plane].subband[16];
  1137. output = s->plane[plane].l_h[7];
  1138. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1139. low = s->plane[plane].l_h[9];
  1140. high = s->plane[plane].subband[11];
  1141. output = s->plane[plane].l_h[8];
  1142. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1143. low = s->plane[plane].subband[12];
  1144. high = s->plane[plane].subband[13];
  1145. output = s->plane[plane].l_h[9];
  1146. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1147. if (s->sample_type == 1)
  1148. continue;
  1149. dst = (int16_t *)pic->data[act_plane];
  1150. low = s->plane[plane].l_h[6];
  1151. high = s->plane[plane].l_h[7];
  1152. for (i = 0; i < s->plane[act_plane].height / 2; i++) {
  1153. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  1154. low += output_stride * 2;
  1155. high += output_stride * 2;
  1156. dst += pic->linesize[act_plane];
  1157. }
  1158. }
  1159. }
  1160. }
  1161. if (s->transform_type == 2 && s->sample_type == 1) {
  1162. int16_t *low, *high, *dst;
  1163. int output_stride, lowpass_height, lowpass_width;
  1164. ptrdiff_t dst_linesize;
  1165. for (plane = 0; plane < s->planes; plane++) {
  1166. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  1167. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1168. act_plane = 0;
  1169. dst_linesize = pic->linesize[act_plane];
  1170. } else {
  1171. dst_linesize = pic->linesize[act_plane] / 2;
  1172. }
  1173. lowpass_height = s->plane[plane].band[4][1].height;
  1174. output_stride = s->plane[plane].band[4][1].a_width;
  1175. lowpass_width = s->plane[plane].band[4][1].width;
  1176. if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
  1177. s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
  1178. lowpass_width < 3 || lowpass_height < 3) {
  1179. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1180. ret = AVERROR(EINVAL);
  1181. goto end;
  1182. }
  1183. if (s->progressive) {
  1184. dst = (int16_t *)pic->data[act_plane];
  1185. low = s->plane[plane].l_h[8];
  1186. high = s->plane[plane].l_h[9];
  1187. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1188. if (plane & 1)
  1189. dst++;
  1190. if (plane > 1)
  1191. dst += pic->linesize[act_plane] >> 1;
  1192. }
  1193. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  1194. (lowpass_height * 2 > avctx->coded_height / 2 ||
  1195. lowpass_width * 2 > avctx->coded_width / 2 )
  1196. ) {
  1197. ret = AVERROR_INVALIDDATA;
  1198. goto end;
  1199. }
  1200. for (i = 0; i < s->plane[act_plane].height; i++) {
  1201. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  1202. low += output_stride;
  1203. high += output_stride;
  1204. dst += dst_linesize;
  1205. }
  1206. } else {
  1207. dst = (int16_t *)pic->data[act_plane];
  1208. low = s->plane[plane].l_h[8];
  1209. high = s->plane[plane].l_h[9];
  1210. for (i = 0; i < s->plane[act_plane].height / 2; i++) {
  1211. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  1212. low += output_stride * 2;
  1213. high += output_stride * 2;
  1214. dst += pic->linesize[act_plane];
  1215. }
  1216. }
  1217. }
  1218. }
  1219. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
  1220. process_bayer(pic, s->bpc);
  1221. end:
  1222. if (ret < 0)
  1223. return ret;
  1224. *got_frame = 1;
  1225. return avpkt->size;
  1226. }
  1227. static av_cold int cfhd_close(AVCodecContext *avctx)
  1228. {
  1229. CFHDContext *s = avctx->priv_data;
  1230. free_buffers(s);
  1231. ff_free_vlc(&s->vlc_9);
  1232. ff_free_vlc(&s->vlc_18);
  1233. return 0;
  1234. }
#if HAVE_THREADS
/**
 * Frame-threading state hand-off: copy decoder state from the context that
 * just finished header/transform setup (src) into the context that will
 * decode the next frame (dst).
 *
 * Only required for 3D (temporal) transforms: with transform_type != 0 the
 * second sample of a group reuses wavelet data decoded for the first, so the
 * band metadata and the inverse-DWT buffers must travel across thread
 * contexts. For 2D transforms (transform_type == 0) each frame is
 * self-contained and nothing needs copying.
 *
 * @param dst destination thread's codec context
 * @param src source thread's codec context (const; read-only here)
 * @return 0 on success, a negative AVERROR code if buffer allocation fails
 */
static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
{
    CFHDContext *psrc = src->priv_data;
    CFHDContext *pdst = dst->priv_data;
    int ret;

    /* Same context, or 2D transform: no inter-frame state to carry over. */
    if (dst == src || psrc->transform_type == 0)
        return 0;

    /* If the destination's buffers were sized for different geometry or a
     * different transform, drop them; they are reallocated below. This
     * comparison must happen BEFORE the a_* fields are overwritten. */
    if (pdst->plane[0].idwt_size != psrc->plane[0].idwt_size ||
        pdst->a_format != psrc->a_format ||
        pdst->a_width != psrc->a_width ||
        pdst->a_height != psrc->a_height ||
        pdst->a_transform_type != psrc->a_transform_type)
        free_buffers(pdst);

    /* Mirror the "allocated-for" parameters of the source context. */
    pdst->a_format         = psrc->a_format;
    pdst->a_width          = psrc->a_width;
    pdst->a_height         = psrc->a_height;
    pdst->a_transform_type = psrc->a_transform_type;
    pdst->transform_type   = psrc->transform_type;
    pdst->progressive      = psrc->progressive;
    pdst->planes           = psrc->planes;

    /* (Re)allocate wavelet buffers when none are present — either first use
     * of this context or freed just above due to a geometry change. The
     * coded_* fields are the inputs alloc_buffers() sizes from. */
    if (!pdst->plane[0].idwt_buf) {
        pdst->coded_width    = pdst->a_width;
        pdst->coded_height   = pdst->a_height;
        pdst->coded_format   = pdst->a_format;
        pdst->transform_type = pdst->a_transform_type;
        ret = alloc_buffers(dst);
        if (ret < 0)
            return ret;
    }

    /* Copy per-plane band descriptors and the raw inverse-DWT sample data
     * so the destination can continue the temporal transform. */
    for (int plane = 0; plane < pdst->planes; plane++) {
        memcpy(pdst->plane[plane].band, psrc->plane[plane].band, sizeof(pdst->plane[plane].band));
        memcpy(pdst->plane[plane].idwt_buf, psrc->plane[plane].idwt_buf,
               pdst->plane[plane].idwt_size * sizeof(int16_t));
    }

    return 0;
}
#endif
  1273. AVCodec ff_cfhd_decoder = {
  1274. .name = "cfhd",
  1275. .long_name = NULL_IF_CONFIG_SMALL("GoPro CineForm HD"),
  1276. .type = AVMEDIA_TYPE_VIDEO,
  1277. .id = AV_CODEC_ID_CFHD,
  1278. .priv_data_size = sizeof(CFHDContext),
  1279. .init = cfhd_init,
  1280. .close = cfhd_close,
  1281. .decode = cfhd_decode,
  1282. .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
  1283. .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
  1284. .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
  1285. };