You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1391 lines
56KB

  1. /*
  2. * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
  3. *
  4. * This file is part of FFmpeg.
  5. *
  6. * FFmpeg is free software; you can redistribute it and/or
  7. * modify it under the terms of the GNU Lesser General Public
  8. * License as published by the Free Software Foundation; either
  9. * version 2.1 of the License, or (at your option) any later version.
  10. *
  11. * FFmpeg is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  14. * Lesser General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU Lesser General Public
  17. * License along with FFmpeg; if not, write to the Free Software
  18. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  19. */
  20. /**
  21. * @file
  22. * Cineform HD video decoder
  23. */
  24. #include "libavutil/attributes.h"
  25. #include "libavutil/buffer.h"
  26. #include "libavutil/common.h"
  27. #include "libavutil/imgutils.h"
  28. #include "libavutil/intreadwrite.h"
  29. #include "libavutil/opt.h"
  30. #include "avcodec.h"
  31. #include "bytestream.h"
  32. #include "get_bits.h"
  33. #include "internal.h"
  34. #include "thread.h"
  35. #include "cfhd.h"
  36. #define ALPHA_COMPAND_DC_OFFSET 256
  37. #define ALPHA_COMPAND_GAIN 9400
  38. static av_cold int cfhd_init(AVCodecContext *avctx)
  39. {
  40. CFHDContext *s = avctx->priv_data;
  41. s->avctx = avctx;
  42. for (int i = 0; i < 64; i++) {
  43. int val = i;
  44. if (val >= 40) {
  45. if (val >= 54) {
  46. val -= 54;
  47. val <<= 2;
  48. val += 54;
  49. }
  50. val -= 40;
  51. val <<= 2;
  52. val += 40;
  53. }
  54. s->lut[0][i] = val;
  55. }
  56. for (int i = 0; i < 256; i++)
  57. s->lut[1][i] = i + ((768LL * i * i * i) / (256 * 256 * 256));
  58. return ff_cfhd_init_vlcs(s);
  59. }
  60. static void init_plane_defaults(CFHDContext *s)
  61. {
  62. s->subband_num = 0;
  63. s->level = 0;
  64. s->subband_num_actual = 0;
  65. }
  66. static void init_peak_table_defaults(CFHDContext *s)
  67. {
  68. s->peak.level = 0;
  69. s->peak.offset = 0;
  70. memset(&s->peak.base, 0, sizeof(s->peak.base));
  71. }
  72. static void init_frame_defaults(CFHDContext *s)
  73. {
  74. s->coded_width = 0;
  75. s->coded_height = 0;
  76. s->coded_format = AV_PIX_FMT_YUV422P10;
  77. s->cropped_height = 0;
  78. s->bpc = 10;
  79. s->channel_cnt = 3;
  80. s->subband_cnt = SUBBAND_COUNT;
  81. s->channel_num = 0;
  82. s->lowpass_precision = 16;
  83. s->quantisation = 1;
  84. s->codebook = 0;
  85. s->difference_coding = 0;
  86. s->frame_type = 0;
  87. s->sample_type = 0;
  88. init_plane_defaults(s);
  89. init_peak_table_defaults(s);
  90. }
  91. static inline int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
  92. {
  93. if (codebook == 0 || codebook == 1) {
  94. return s->lut[codebook][abs(level)] * FFSIGN(level) * quantisation;
  95. } else
  96. return level * quantisation;
  97. }
  98. static inline void difference_coding(int16_t *band, int width, int height)
  99. {
  100. int i,j;
  101. for (i = 0; i < height; i++) {
  102. for (j = 1; j < width; j++) {
  103. band[j] += band[j-1];
  104. }
  105. band += width;
  106. }
  107. }
  108. static inline void peak_table(int16_t *band, Peak *peak, int length)
  109. {
  110. int i;
  111. for (i = 0; i < length; i++)
  112. if (abs(band[i]) > peak->level)
  113. band[i] = bytestream2_get_le16(&peak->base);
  114. }
  115. static inline void process_alpha(int16_t *alpha, int width)
  116. {
  117. int i, channel;
  118. for (i = 0; i < width; i++) {
  119. channel = alpha[i];
  120. channel -= ALPHA_COMPAND_DC_OFFSET;
  121. channel <<= 3;
  122. channel *= ALPHA_COMPAND_GAIN;
  123. channel >>= 16;
  124. channel = av_clip_uintp2(channel, 12);
  125. alpha[i] = channel;
  126. }
  127. }
/* Convert one decoded Bayer plane in place from the transmitted
 * (G, R-G, B-G, G-delta) representation back to RGGB sensor values,
 * scaled up to 16 bits.
 *
 * The four pointers address the four positions of each 2x2 Bayer quad:
 * r at (0,0), g1 at (0,1) (+2 bytes = one uint16_t), g2 at (1,0)
 * (next byte line), b at (1,1). */
static inline void process_bayer(AVFrame *frame, int bpc)
{
    const int linesize = frame->linesize[0];
    uint16_t *r  = (uint16_t *)frame->data[0];
    uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);
    uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]);
    uint16_t *b  = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
    const int mid = 1 << (bpc - 1);          /* bias of the difference channels */
    const int factor = 1 << (16 - bpc);      /* scale bpc-bit values to 16 bits */

    /* One iteration covers a pair of lines (a full quad row). */
    for (int y = 0; y < frame->height >> 1; y++) {
        for (int x = 0; x < frame->width; x += 2) {
            int R, G1, G2, B;
            int g, rg, bg, gd;

            /* The quad positions hold, in order: G average, R-G, B-G and
             * the green delta (all but g biased by mid). */
            g  = r[x];
            rg = g1[x];
            bg = g2[x];
            gd = b[x];
            gd -= mid;

            R  = (rg - mid) * 2 + g;
            G1 = g + gd;
            G2 = g - gd;
            B  = (bg - mid) * 2 + g;

            R  = av_clip_uintp2(R  * factor, 16);
            G1 = av_clip_uintp2(G1 * factor, 16);
            G2 = av_clip_uintp2(G2 * factor, 16);
            B  = av_clip_uintp2(B  * factor, 16);

            r[x]  = R;
            g1[x] = G1;
            g2[x] = G2;
            b[x]  = B;
        }
        /* NOTE: linesize is a byte count, but these are uint16_t pointers,
         * so "+= linesize" advances by linesize*2 bytes = two picture lines,
         * i.e. exactly one 2x2 quad row. Intentional, not a bug. */
        r  += linesize;
        g1 += linesize;
        g2 += linesize;
        b  += linesize;
    }
}
  165. static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
  166. int width, int linesize, int plane)
  167. {
  168. int i;
  169. int16_t even, odd;
  170. for (i = 0; i < width; i++) {
  171. even = (low[i] - high[i])/2;
  172. odd = (low[i] + high[i])/2;
  173. output[i] = av_clip_uintp2(even, 10);
  174. output[i + linesize] = av_clip_uintp2(odd, 10);
  175. }
  176. }
  177. static inline void inverse_temporal_filter(int16_t *low, int16_t *high, int width)
  178. {
  179. for (int i = 0; i < width; i++) {
  180. int even = (low[i] - high[i]) / 2;
  181. int odd = (low[i] + high[i]) / 2;
  182. low[i] = even;
  183. high[i] = odd;
  184. }
  185. }
  186. static void free_buffers(CFHDContext *s)
  187. {
  188. int i, j;
  189. for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
  190. av_freep(&s->plane[i].idwt_buf);
  191. av_freep(&s->plane[i].idwt_tmp);
  192. s->plane[i].idwt_size = 0;
  193. for (j = 0; j < SUBBAND_COUNT_3D; j++)
  194. s->plane[i].subband[j] = NULL;
  195. for (j = 0; j < 10; j++)
  196. s->plane[i].l_h[j] = NULL;
  197. }
  198. s->a_height = 0;
  199. s->a_width = 0;
  200. }
/* Allocate the per-plane IDWT coefficient and scratch buffers for the
 * current coded dimensions/format, and wire up the subband and l_h
 * (low/high intermediate) pointers into them.
 *
 * Returns 0 on success, a negative AVERROR on failure. On success the
 * accepted geometry is cached in a_width/a_height/a_format. */
static int alloc_buffers(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;
    int i, j, ret, planes, bayer = 0;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
        return ret;
    avctx->pix_fmt = s->coded_format;

    ff_cfhddsp_init(&s->dsp, s->bpc, avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);
    /* Bayer data is stored as four half-resolution component planes
     * (R, G1, G2, B), so override what the pixfmt reports. */
    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        planes = 4;
        chroma_x_shift = 1;
        chroma_y_shift = 1;
        bayer = 1;
    }

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        /* Plane 0 is full size for non-Bayer formats; everything else is
         * subsampled by the chroma shifts. */
        int width = (i || bayer) ? s->coded_width >> chroma_x_shift : s->coded_width;
        int height = (i || bayer) ? s->coded_height >> chroma_y_shift : s->coded_height;
        /* Stride of the full-resolution level, derived from the padded
         * level-3 width (see w8 below). */
        ptrdiff_t stride = (FFALIGN(width / 8, 8) + 64) * 8;

        if (chroma_y_shift && !bayer)
            height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        /* Padded dimensions of each decomposition level: w8/h8 is the
         * smallest (level 3) band, each level up doubles both axes. */
        w8 = FFALIGN(s->plane[i].width / 8, 8) + 64;
        h8 = FFALIGN(height, 8) / 8;
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        if (s->transform_type == 0) {
            /* 2D transform: one frame's worth of coefficients. */
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        } else {
            /* 3D (temporal) transform: space for two frames. */
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride * 2;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        }
        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        /* Subband layout inside idwt_buf. Note the 1/2 swap: subband 1 is
         * placed after two LL-sized blocks and subband 2 after one, and the
         * same pattern repeats at every level. */
        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        if (s->transform_type == 0) {
            s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
        } else {
            /* 3D transform: subband 7 doubles as the base of the second
             * frame's coefficient area (chained assignment). */
            int16_t *frame2 =
            s->plane[i].subband[7] = s->plane[i].idwt_buf + 4 * w2 * h2;

            s->plane[i].subband[8] = frame2 + 2 * w4 * h4;
            s->plane[i].subband[9] = frame2 + 1 * w4 * h4;
            s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
            s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
            s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
            s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
            s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
        }

        /* Padded band dimensions per level; for the 3D layout, levels map
         * to sizes as 0 -> /8, 1-2 -> /4, 3+ -> /2. */
        if (s->transform_type == 0) {
            for (j = 0; j < DWT_LEVELS; j++) {
                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width = w8 << j;
                    s->plane[i].band[j][k].a_height = h8 << j;
                }
            }
        } else {
            for (j = 0; j < DWT_LEVELS_3D; j++) {
                int t = j < 1 ? 0 : (j < 3 ? 1 : 2);

                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width = w8 << t;
                    s->plane[i].band[j][k].a_height = h8 << t;
                }
            }
        }

        /* Intermediate low/high buffers inside idwt_tmp.
         * ll2 and ll1 commented out because they are done in-place. */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
        if (s->transform_type != 0) {
            /* Second frame's intermediates live in the upper half of idwt_tmp. */
            int16_t *frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;

            s->plane[i].l_h[8] = frame2;
            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
        }
    }

    /* Cache the geometry the buffers were sized for. */
    s->a_height = s->coded_height;
    s->a_width = s->coded_width;
    s->a_format = s->coded_format;

    return 0;
}
  313. static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
  314. AVPacket *avpkt)
  315. {
  316. CFHDContext *s = avctx->priv_data;
  317. CFHDDSPContext *dsp = &s->dsp;
  318. GetByteContext gb;
  319. ThreadFrame frame = { .f = data };
  320. AVFrame *pic = data;
  321. int ret = 0, i, j, plane, got_buffer = 0;
  322. int16_t *coeff_data;
  323. init_frame_defaults(s);
  324. s->planes = av_pix_fmt_count_planes(s->coded_format);
  325. bytestream2_init(&gb, avpkt->data, avpkt->size);
  326. while (bytestream2_get_bytes_left(&gb) >= 4) {
  327. /* Bit weird but implement the tag parsing as the spec says */
  328. uint16_t tagu = bytestream2_get_be16(&gb);
  329. int16_t tag = (int16_t)tagu;
  330. int8_t tag8 = (int8_t)(tagu >> 8);
  331. uint16_t abstag = abs(tag);
  332. int8_t abs_tag8 = abs(tag8);
  333. uint16_t data = bytestream2_get_be16(&gb);
  334. if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
  335. av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
  336. } else if (tag == SampleFlags) {
  337. av_log(avctx, AV_LOG_DEBUG, "Progressive? %"PRIu16"\n", data);
  338. s->progressive = data & 0x0001;
  339. } else if (tag == FrameType) {
  340. s->frame_type = data;
  341. av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
  342. } else if (abstag == VersionMajor) {
  343. av_log(avctx, AV_LOG_DEBUG, "Version major %"PRIu16"\n", data);
  344. } else if (abstag == VersionMinor) {
  345. av_log(avctx, AV_LOG_DEBUG, "Version minor %"PRIu16"\n", data);
  346. } else if (abstag == VersionRevision) {
  347. av_log(avctx, AV_LOG_DEBUG, "Version revision %"PRIu16"\n", data);
  348. } else if (abstag == VersionEdit) {
  349. av_log(avctx, AV_LOG_DEBUG, "Version edit %"PRIu16"\n", data);
  350. } else if (abstag == Version) {
  351. av_log(avctx, AV_LOG_DEBUG, "Version %"PRIu16"\n", data);
  352. } else if (tag == ImageWidth) {
  353. av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
  354. s->coded_width = data;
  355. } else if (tag == ImageHeight) {
  356. av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
  357. s->coded_height = data;
  358. } else if (tag == ChannelCount) {
  359. av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
  360. s->channel_cnt = data;
  361. if (data > 4) {
  362. av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
  363. ret = AVERROR_PATCHWELCOME;
  364. break;
  365. }
  366. } else if (tag == SubbandCount) {
  367. av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
  368. if (data != SUBBAND_COUNT && data != SUBBAND_COUNT_3D) {
  369. av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
  370. ret = AVERROR_PATCHWELCOME;
  371. break;
  372. }
  373. } else if (tag == ChannelNumber) {
  374. s->channel_num = data;
  375. av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
  376. if (s->channel_num >= s->planes) {
  377. av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
  378. ret = AVERROR(EINVAL);
  379. break;
  380. }
  381. init_plane_defaults(s);
  382. } else if (tag == SubbandNumber) {
  383. if (s->subband_num != 0 && data == 1) // hack
  384. s->level++;
  385. av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
  386. s->subband_num = data;
  387. if ((s->transform_type == 0 && s->level >= DWT_LEVELS) ||
  388. (s->transform_type == 2 && s->level >= DWT_LEVELS_3D)) {
  389. av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
  390. ret = AVERROR(EINVAL);
  391. break;
  392. }
  393. if (s->subband_num > 3) {
  394. av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
  395. ret = AVERROR(EINVAL);
  396. break;
  397. }
  398. } else if (tag == SubbandBand) {
  399. av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
  400. s->subband_num_actual = data;
  401. if ((s->transform_type == 0 && s->subband_num_actual >= SUBBAND_COUNT) ||
  402. (s->transform_type == 2 && s->subband_num_actual >= SUBBAND_COUNT_3D && s->subband_num_actual != 255)) {
  403. av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
  404. ret = AVERROR(EINVAL);
  405. break;
  406. }
  407. } else if (tag == LowpassPrecision)
  408. av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
  409. else if (tag == Quantization) {
  410. s->quantisation = data;
  411. av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
  412. } else if (tag == PrescaleTable) {
  413. for (i = 0; i < 8; i++)
  414. s->prescale_table[i] = (data >> (14 - i * 2)) & 0x3;
  415. av_log(avctx, AV_LOG_DEBUG, "Prescale table: %x\n", data);
  416. } else if (tag == BandEncoding) {
  417. if (!data || data > 5) {
  418. av_log(avctx, AV_LOG_ERROR, "Invalid band encoding\n");
  419. ret = AVERROR(EINVAL);
  420. break;
  421. }
  422. s->band_encoding = data;
  423. av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d : %x\n", s->subband_num_actual, data);
  424. } else if (tag == LowpassWidth) {
  425. av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
  426. s->plane[s->channel_num].band[0][0].width = data;
  427. s->plane[s->channel_num].band[0][0].stride = data;
  428. } else if (tag == LowpassHeight) {
  429. av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
  430. s->plane[s->channel_num].band[0][0].height = data;
  431. } else if (tag == SampleType) {
  432. s->sample_type = data;
  433. av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
  434. } else if (tag == TransformType) {
  435. if (data > 2) {
  436. av_log(avctx, AV_LOG_ERROR, "Invalid transform type\n");
  437. ret = AVERROR(EINVAL);
  438. break;
  439. } else if (data == 1) {
  440. av_log(avctx, AV_LOG_ERROR, "unsupported transform type\n");
  441. ret = AVERROR_PATCHWELCOME;
  442. break;
  443. }
  444. s->transform_type = data;
  445. av_log(avctx, AV_LOG_DEBUG, "Transform type %"PRIu16"\n", data);
  446. } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
  447. if (abstag == 0x4001)
  448. s->peak.level = 0;
  449. av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
  450. bytestream2_skipu(&gb, data * 4);
  451. } else if (tag == FrameIndex) {
  452. av_log(avctx, AV_LOG_DEBUG, "Frame index %"PRIu16"\n", data);
  453. s->frame_index = data;
  454. } else if (tag == SampleIndexTable) {
  455. av_log(avctx, AV_LOG_DEBUG, "Sample index table - skipping %i values\n", data);
  456. if (data > bytestream2_get_bytes_left(&gb) / 4) {
  457. av_log(avctx, AV_LOG_ERROR, "too many values (%d)\n", data);
  458. ret = AVERROR_INVALIDDATA;
  459. break;
  460. }
  461. for (i = 0; i < data; i++) {
  462. uint32_t offset = bytestream2_get_be32(&gb);
  463. av_log(avctx, AV_LOG_DEBUG, "Offset = %"PRIu32"\n", offset);
  464. }
  465. } else if (tag == HighpassWidth) {
  466. av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
  467. if (data < 3) {
  468. av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
  469. ret = AVERROR(EINVAL);
  470. break;
  471. }
  472. s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
  473. s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
  474. } else if (tag == HighpassHeight) {
  475. av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
  476. if (data < 3) {
  477. av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
  478. ret = AVERROR(EINVAL);
  479. break;
  480. }
  481. s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
  482. } else if (tag == BandWidth) {
  483. av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
  484. if (data < 3) {
  485. av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
  486. ret = AVERROR(EINVAL);
  487. break;
  488. }
  489. s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
  490. s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
  491. } else if (tag == BandHeight) {
  492. av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
  493. if (data < 3) {
  494. av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
  495. ret = AVERROR(EINVAL);
  496. break;
  497. }
  498. s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
  499. } else if (tag == InputFormat) {
  500. av_log(avctx, AV_LOG_DEBUG, "Input format %i\n", data);
  501. if (s->coded_format == AV_PIX_FMT_NONE ||
  502. s->coded_format == AV_PIX_FMT_YUV422P10) {
  503. if (data >= 100 && data <= 105) {
  504. s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
  505. } else if (data >= 122 && data <= 128) {
  506. s->coded_format = AV_PIX_FMT_GBRP12;
  507. } else if (data == 30) {
  508. s->coded_format = AV_PIX_FMT_GBRAP12;
  509. } else {
  510. s->coded_format = AV_PIX_FMT_YUV422P10;
  511. }
  512. s->planes = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 4 : av_pix_fmt_count_planes(s->coded_format);
  513. }
  514. } else if (tag == BandCodingFlags) {
  515. s->codebook = data & 0xf;
  516. s->difference_coding = (data >> 4) & 1;
  517. av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
  518. } else if (tag == Precision) {
  519. av_log(avctx, AV_LOG_DEBUG, "Precision %i\n", data);
  520. if (!(data == 10 || data == 12)) {
  521. av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
  522. ret = AVERROR(EINVAL);
  523. break;
  524. }
  525. avctx->bits_per_raw_sample = s->bpc = data;
  526. } else if (tag == EncodedFormat) {
  527. av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
  528. if (data == 1) {
  529. s->coded_format = AV_PIX_FMT_YUV422P10;
  530. } else if (data == 2) {
  531. s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
  532. } else if (data == 3) {
  533. s->coded_format = AV_PIX_FMT_GBRP12;
  534. } else if (data == 4) {
  535. s->coded_format = AV_PIX_FMT_GBRAP12;
  536. } else {
  537. avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
  538. ret = AVERROR_PATCHWELCOME;
  539. break;
  540. }
  541. s->planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
  542. } else if (tag == -DisplayHeight) {
  543. av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
  544. s->cropped_height = data;
  545. } else if (tag == -PeakOffsetLow) {
  546. s->peak.offset &= ~0xffff;
  547. s->peak.offset |= (data & 0xffff);
  548. s->peak.base = gb;
  549. s->peak.level = 0;
  550. } else if (tag == -PeakOffsetHigh) {
  551. s->peak.offset &= 0xffff;
  552. s->peak.offset |= (data & 0xffffU)<<16;
  553. s->peak.base = gb;
  554. s->peak.level = 0;
  555. } else if (tag == -PeakLevel && s->peak.offset) {
  556. s->peak.level = data;
  557. bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
  558. } else
  559. av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);
  560. if (tag == BitstreamMarker && data == 0xf0f &&
  561. s->coded_format != AV_PIX_FMT_NONE) {
  562. int lowpass_height = s->plane[s->channel_num].band[0][0].height;
  563. int lowpass_width = s->plane[s->channel_num].band[0][0].width;
  564. int factor = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 2 : 1;
  565. if (s->coded_width) {
  566. s->coded_width *= factor;
  567. }
  568. if (s->coded_height) {
  569. s->coded_height *= factor;
  570. }
  571. if (!s->a_width && !s->coded_width) {
  572. s->coded_width = lowpass_width * factor * 8;
  573. }
  574. if (!s->a_height && !s->coded_height) {
  575. s->coded_height = lowpass_height * factor * 8;
  576. }
  577. if (s->a_width && !s->coded_width)
  578. s->coded_width = s->a_width;
  579. if (s->a_height && !s->coded_height)
  580. s->coded_height = s->a_height;
  581. if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
  582. s->a_format != s->coded_format) {
  583. free_buffers(s);
  584. if ((ret = alloc_buffers(avctx)) < 0) {
  585. free_buffers(s);
  586. return ret;
  587. }
  588. }
  589. ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
  590. if (ret < 0)
  591. return ret;
  592. if (s->cropped_height) {
  593. unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
  594. if (avctx->height < height)
  595. return AVERROR_INVALIDDATA;
  596. avctx->height = height;
  597. }
  598. frame.f->width =
  599. frame.f->height = 0;
  600. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  601. return ret;
  602. s->coded_width = 0;
  603. s->coded_height = 0;
  604. s->coded_format = AV_PIX_FMT_NONE;
  605. got_buffer = 1;
  606. } else if (tag == FrameIndex && data == 1 && s->sample_type == 1 && s->frame_type == 2) {
  607. frame.f->width =
  608. frame.f->height = 0;
  609. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  610. return ret;
  611. s->coded_width = 0;
  612. s->coded_height = 0;
  613. s->coded_format = AV_PIX_FMT_NONE;
  614. got_buffer = 1;
  615. }
  616. if (s->subband_num_actual == 255)
  617. goto finish;
  618. coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];
  619. /* Lowpass coefficients */
  620. if (tag == BitstreamMarker && data == 0xf0f && s->a_width && s->a_height) {
  621. int lowpass_height = s->plane[s->channel_num].band[0][0].height;
  622. int lowpass_width = s->plane[s->channel_num].band[0][0].width;
  623. int lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
  624. int lowpass_a_width = s->plane[s->channel_num].band[0][0].a_width;
  625. if (lowpass_width < 3 ||
  626. lowpass_width > lowpass_a_width) {
  627. av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
  628. ret = AVERROR(EINVAL);
  629. goto end;
  630. }
  631. if (lowpass_height < 3 ||
  632. lowpass_height > lowpass_a_height) {
  633. av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
  634. ret = AVERROR(EINVAL);
  635. goto end;
  636. }
  637. if (!got_buffer) {
  638. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  639. ret = AVERROR(EINVAL);
  640. goto end;
  641. }
  642. if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
  643. lowpass_width * lowpass_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
  644. av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
  645. ret = AVERROR(EINVAL);
  646. goto end;
  647. }
  648. av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
  649. for (i = 0; i < lowpass_height; i++) {
  650. for (j = 0; j < lowpass_width; j++)
  651. coeff_data[j] = bytestream2_get_be16u(&gb);
  652. coeff_data += lowpass_width;
  653. }
  654. /* Align to mod-4 position to continue reading tags */
  655. bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);
  656. /* Copy last line of coefficients if odd height */
  657. if (lowpass_height & 1) {
  658. memcpy(&coeff_data[lowpass_height * lowpass_width],
  659. &coeff_data[(lowpass_height - 1) * lowpass_width],
  660. lowpass_width * sizeof(*coeff_data));
  661. }
  662. av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
  663. }
  664. if ((tag == BandHeader || tag == BandSecondPass) && s->subband_num_actual != 255 && s->a_width && s->a_height) {
  665. int highpass_height = s->plane[s->channel_num].band[s->level][s->subband_num].height;
  666. int highpass_width = s->plane[s->channel_num].band[s->level][s->subband_num].width;
  667. int highpass_a_width = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
  668. int highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
  669. int highpass_stride = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
  670. int expected;
  671. int a_expected = highpass_a_height * highpass_a_width;
  672. int level, run, coeff;
  673. int count = 0, bytes;
  674. if (!got_buffer) {
  675. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  676. ret = AVERROR(EINVAL);
  677. goto end;
  678. }
  679. if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
  680. av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
  681. ret = AVERROR(EINVAL);
  682. goto end;
  683. }
  684. expected = highpass_height * highpass_stride;
  685. av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);
  686. ret = init_get_bits8(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb));
  687. if (ret < 0)
  688. goto end;
  689. {
  690. OPEN_READER(re, &s->gb);
  691. const int lossless = s->band_encoding == 5;
  692. if (s->codebook == 0 && s->transform_type == 2 && s->subband_num_actual == 7)
  693. s->codebook = 1;
  694. if (!s->codebook) {
  695. while (1) {
  696. UPDATE_CACHE(re, &s->gb);
  697. GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
  698. VLC_BITS, 3, 1);
  699. /* escape */
  700. if (level == 64)
  701. break;
  702. count += run;
  703. if (count > expected)
  704. break;
  705. if (!lossless)
  706. coeff = dequant_and_decompand(s, level, s->quantisation, 0);
  707. else
  708. coeff = level;
  709. if (tag == BandSecondPass) {
  710. const uint16_t q = s->quantisation;
  711. for (i = 0; i < run; i++) {
  712. *coeff_data |= coeff << 8;
  713. *coeff_data++ *= q;
  714. }
  715. } else {
  716. for (i = 0; i < run; i++)
  717. *coeff_data++ = coeff;
  718. }
  719. }
  720. } else {
  721. while (1) {
  722. UPDATE_CACHE(re, &s->gb);
  723. GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
  724. VLC_BITS, 3, 1);
  725. /* escape */
  726. if (level == 255 && run == 2)
  727. break;
  728. count += run;
  729. if (count > expected)
  730. break;
  731. if (!lossless)
  732. coeff = dequant_and_decompand(s, level, s->quantisation, s->codebook);
  733. else
  734. coeff = level;
  735. if (tag == BandSecondPass) {
  736. const uint16_t q = s->quantisation;
  737. for (i = 0; i < run; i++) {
  738. *coeff_data |= coeff << 8;
  739. *coeff_data++ *= q;
  740. }
  741. } else {
  742. for (i = 0; i < run; i++)
  743. *coeff_data++ = coeff;
  744. }
  745. }
  746. }
  747. CLOSE_READER(re, &s->gb);
  748. }
  749. if (count > expected) {
  750. av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
  751. ret = AVERROR(EINVAL);
  752. goto end;
  753. }
  754. if (s->peak.level)
  755. peak_table(coeff_data - count, &s->peak, count);
  756. if (s->difference_coding)
  757. difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);
  758. bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
  759. if (bytes > bytestream2_get_bytes_left(&gb)) {
  760. av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
  761. ret = AVERROR(EINVAL);
  762. goto end;
  763. } else
  764. bytestream2_seek(&gb, bytes, SEEK_CUR);
  765. av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);
  766. finish:
  767. if (s->subband_num_actual != 255)
  768. s->codebook = 0;
  769. }
  770. }
  771. s->planes = av_pix_fmt_count_planes(avctx->pix_fmt);
  772. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  773. s->progressive = 1;
  774. s->planes = 4;
  775. }
  776. ff_thread_finish_setup(avctx);
  777. if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
  778. s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
  779. av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
  780. ret = AVERROR(EINVAL);
  781. goto end;
  782. }
  783. if (!got_buffer) {
  784. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  785. ret = AVERROR(EINVAL);
  786. goto end;
  787. }
  788. if (s->transform_type == 0 && s->sample_type != 1) {
  789. for (plane = 0; plane < s->planes && !ret; plane++) {
  790. /* level 1 */
  791. int lowpass_height = s->plane[plane].band[0][0].height;
  792. int output_stride = s->plane[plane].band[0][0].a_width;
  793. int lowpass_width = s->plane[plane].band[0][0].width;
  794. int highpass_stride = s->plane[plane].band[0][1].stride;
  795. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  796. ptrdiff_t dst_linesize;
  797. int16_t *low, *high, *output, *dst;
  798. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  799. act_plane = 0;
  800. dst_linesize = pic->linesize[act_plane];
  801. } else {
  802. dst_linesize = pic->linesize[act_plane] / 2;
  803. }
  804. if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
  805. !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width) {
  806. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  807. ret = AVERROR(EINVAL);
  808. goto end;
  809. }
  810. av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  811. low = s->plane[plane].subband[0];
  812. high = s->plane[plane].subband[2];
  813. output = s->plane[plane].l_h[0];
  814. dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
  815. low = s->plane[plane].subband[1];
  816. high = s->plane[plane].subband[3];
  817. output = s->plane[plane].l_h[1];
  818. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  819. low = s->plane[plane].l_h[0];
  820. high = s->plane[plane].l_h[1];
  821. output = s->plane[plane].subband[0];
  822. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  823. if (s->bpc == 12) {
  824. output = s->plane[plane].subband[0];
  825. for (i = 0; i < lowpass_height * 2; i++) {
  826. for (j = 0; j < lowpass_width * 2; j++)
  827. output[j] *= 4;
  828. output += output_stride * 2;
  829. }
  830. }
  831. /* level 2 */
  832. lowpass_height = s->plane[plane].band[1][1].height;
  833. output_stride = s->plane[plane].band[1][1].a_width;
  834. lowpass_width = s->plane[plane].band[1][1].width;
  835. highpass_stride = s->plane[plane].band[1][1].stride;
  836. if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
  837. !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width) {
  838. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  839. ret = AVERROR(EINVAL);
  840. goto end;
  841. }
  842. av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  843. low = s->plane[plane].subband[0];
  844. high = s->plane[plane].subband[5];
  845. output = s->plane[plane].l_h[3];
  846. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  847. low = s->plane[plane].subband[4];
  848. high = s->plane[plane].subband[6];
  849. output = s->plane[plane].l_h[4];
  850. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  851. low = s->plane[plane].l_h[3];
  852. high = s->plane[plane].l_h[4];
  853. output = s->plane[plane].subband[0];
  854. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  855. output = s->plane[plane].subband[0];
  856. for (i = 0; i < lowpass_height * 2; i++) {
  857. for (j = 0; j < lowpass_width * 2; j++)
  858. output[j] *= 4;
  859. output += output_stride * 2;
  860. }
  861. /* level 3 */
  862. lowpass_height = s->plane[plane].band[2][1].height;
  863. output_stride = s->plane[plane].band[2][1].a_width;
  864. lowpass_width = s->plane[plane].band[2][1].width;
  865. highpass_stride = s->plane[plane].band[2][1].stride;
  866. if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
  867. !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width) {
  868. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  869. ret = AVERROR(EINVAL);
  870. goto end;
  871. }
  872. av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  873. if (s->progressive) {
  874. low = s->plane[plane].subband[0];
  875. high = s->plane[plane].subband[8];
  876. output = s->plane[plane].l_h[6];
  877. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  878. low = s->plane[plane].subband[7];
  879. high = s->plane[plane].subband[9];
  880. output = s->plane[plane].l_h[7];
  881. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  882. dst = (int16_t *)pic->data[act_plane];
  883. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  884. if (plane & 1)
  885. dst++;
  886. if (plane > 1)
  887. dst += pic->linesize[act_plane] >> 1;
  888. }
  889. low = s->plane[plane].l_h[6];
  890. high = s->plane[plane].l_h[7];
  891. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  892. (lowpass_height * 2 > avctx->coded_height / 2 ||
  893. lowpass_width * 2 > avctx->coded_width / 2 )
  894. ) {
  895. ret = AVERROR_INVALIDDATA;
  896. goto end;
  897. }
  898. for (i = 0; i < lowpass_height * 2; i++) {
  899. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  900. if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
  901. process_alpha(dst, lowpass_width * 2);
  902. low += output_stride;
  903. high += output_stride;
  904. dst += dst_linesize;
  905. }
  906. } else {
  907. av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
  908. pic->interlaced_frame = 1;
  909. low = s->plane[plane].subband[0];
  910. high = s->plane[plane].subband[7];
  911. output = s->plane[plane].l_h[6];
  912. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  913. low = s->plane[plane].subband[8];
  914. high = s->plane[plane].subband[9];
  915. output = s->plane[plane].l_h[7];
  916. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  917. dst = (int16_t *)pic->data[act_plane];
  918. low = s->plane[plane].l_h[6];
  919. high = s->plane[plane].l_h[7];
  920. for (i = 0; i < lowpass_height; i++) {
  921. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  922. low += output_stride * 2;
  923. high += output_stride * 2;
  924. dst += pic->linesize[act_plane];
  925. }
  926. }
  927. }
  928. } else if (s->transform_type == 2 && (avctx->internal->is_copy || s->frame_index == 1 || s->sample_type != 1)) {
  929. for (plane = 0; plane < s->planes && !ret; plane++) {
  930. int lowpass_height = s->plane[plane].band[0][0].height;
  931. int output_stride = s->plane[plane].band[0][0].a_width;
  932. int lowpass_width = s->plane[plane].band[0][0].width;
  933. int highpass_stride = s->plane[plane].band[0][1].stride;
  934. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  935. int16_t *low, *high, *output, *dst;
  936. ptrdiff_t dst_linesize;
  937. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  938. act_plane = 0;
  939. dst_linesize = pic->linesize[act_plane];
  940. } else {
  941. dst_linesize = pic->linesize[act_plane] / 2;
  942. }
  943. if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
  944. !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width) {
  945. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  946. ret = AVERROR(EINVAL);
  947. goto end;
  948. }
  949. av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  950. low = s->plane[plane].subband[0];
  951. high = s->plane[plane].subband[2];
  952. output = s->plane[plane].l_h[0];
  953. dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
  954. low = s->plane[plane].subband[1];
  955. high = s->plane[plane].subband[3];
  956. output = s->plane[plane].l_h[1];
  957. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  958. low = s->plane[plane].l_h[0];
  959. high = s->plane[plane].l_h[1];
  960. output = s->plane[plane].l_h[7];
  961. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  962. if (s->bpc == 12) {
  963. output = s->plane[plane].l_h[7];
  964. for (i = 0; i < lowpass_height * 2; i++) {
  965. for (j = 0; j < lowpass_width * 2; j++)
  966. output[j] *= 4;
  967. output += output_stride * 2;
  968. }
  969. }
  970. lowpass_height = s->plane[plane].band[1][1].height;
  971. output_stride = s->plane[plane].band[1][1].a_width;
  972. lowpass_width = s->plane[plane].band[1][1].width;
  973. highpass_stride = s->plane[plane].band[1][1].stride;
  974. if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
  975. !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width) {
  976. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  977. ret = AVERROR(EINVAL);
  978. goto end;
  979. }
  980. av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  981. low = s->plane[plane].l_h[7];
  982. high = s->plane[plane].subband[5];
  983. output = s->plane[plane].l_h[3];
  984. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  985. low = s->plane[plane].subband[4];
  986. high = s->plane[plane].subband[6];
  987. output = s->plane[plane].l_h[4];
  988. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  989. low = s->plane[plane].l_h[3];
  990. high = s->plane[plane].l_h[4];
  991. output = s->plane[plane].l_h[7];
  992. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  993. output = s->plane[plane].l_h[7];
  994. for (i = 0; i < lowpass_height * 2; i++) {
  995. for (j = 0; j < lowpass_width * 2; j++)
  996. output[j] *= 4;
  997. output += output_stride * 2;
  998. }
  999. low = s->plane[plane].subband[7];
  1000. high = s->plane[plane].subband[9];
  1001. output = s->plane[plane].l_h[3];
  1002. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1003. low = s->plane[plane].subband[8];
  1004. high = s->plane[plane].subband[10];
  1005. output = s->plane[plane].l_h[4];
  1006. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1007. low = s->plane[plane].l_h[3];
  1008. high = s->plane[plane].l_h[4];
  1009. output = s->plane[plane].l_h[9];
  1010. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  1011. lowpass_height = s->plane[plane].band[4][1].height;
  1012. output_stride = s->plane[plane].band[4][1].a_width;
  1013. lowpass_width = s->plane[plane].band[4][1].width;
  1014. highpass_stride = s->plane[plane].band[4][1].stride;
  1015. av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  1016. if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
  1017. !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width) {
  1018. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1019. ret = AVERROR(EINVAL);
  1020. goto end;
  1021. }
  1022. low = s->plane[plane].l_h[7];
  1023. high = s->plane[plane].l_h[9];
  1024. output = s->plane[plane].l_h[7];
  1025. for (i = 0; i < lowpass_height; i++) {
  1026. inverse_temporal_filter(low, high, lowpass_width);
  1027. low += output_stride;
  1028. high += output_stride;
  1029. }
  1030. if (s->progressive) {
  1031. low = s->plane[plane].l_h[7];
  1032. high = s->plane[plane].subband[15];
  1033. output = s->plane[plane].l_h[6];
  1034. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1035. low = s->plane[plane].subband[14];
  1036. high = s->plane[plane].subband[16];
  1037. output = s->plane[plane].l_h[7];
  1038. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1039. low = s->plane[plane].l_h[9];
  1040. high = s->plane[plane].subband[12];
  1041. output = s->plane[plane].l_h[8];
  1042. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1043. low = s->plane[plane].subband[11];
  1044. high = s->plane[plane].subband[13];
  1045. output = s->plane[plane].l_h[9];
  1046. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1047. if (s->sample_type == 1)
  1048. continue;
  1049. dst = (int16_t *)pic->data[act_plane];
  1050. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1051. if (plane & 1)
  1052. dst++;
  1053. if (plane > 1)
  1054. dst += pic->linesize[act_plane] >> 1;
  1055. }
  1056. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  1057. (lowpass_height * 2 > avctx->coded_height / 2 ||
  1058. lowpass_width * 2 > avctx->coded_width / 2 )
  1059. ) {
  1060. ret = AVERROR_INVALIDDATA;
  1061. goto end;
  1062. }
  1063. low = s->plane[plane].l_h[6];
  1064. high = s->plane[plane].l_h[7];
  1065. for (i = 0; i < lowpass_height * 2; i++) {
  1066. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  1067. low += output_stride;
  1068. high += output_stride;
  1069. dst += dst_linesize;
  1070. }
  1071. } else {
  1072. pic->interlaced_frame = 1;
  1073. low = s->plane[plane].l_h[7];
  1074. high = s->plane[plane].subband[14];
  1075. output = s->plane[plane].l_h[6];
  1076. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1077. low = s->plane[plane].subband[15];
  1078. high = s->plane[plane].subband[16];
  1079. output = s->plane[plane].l_h[7];
  1080. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1081. low = s->plane[plane].l_h[9];
  1082. high = s->plane[plane].subband[11];
  1083. output = s->plane[plane].l_h[8];
  1084. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1085. low = s->plane[plane].subband[12];
  1086. high = s->plane[plane].subband[13];
  1087. output = s->plane[plane].l_h[9];
  1088. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1089. if (s->sample_type == 1)
  1090. continue;
  1091. dst = (int16_t *)pic->data[act_plane];
  1092. low = s->plane[plane].l_h[6];
  1093. high = s->plane[plane].l_h[7];
  1094. for (i = 0; i < lowpass_height; i++) {
  1095. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  1096. low += output_stride * 2;
  1097. high += output_stride * 2;
  1098. dst += pic->linesize[act_plane];
  1099. }
  1100. }
  1101. }
  1102. }
  1103. if (s->transform_type == 2 && s->sample_type == 1) {
  1104. int16_t *low, *high, *dst;
  1105. int output_stride, lowpass_height, lowpass_width;
  1106. ptrdiff_t dst_linesize;
  1107. for (plane = 0; plane < s->planes; plane++) {
  1108. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  1109. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1110. act_plane = 0;
  1111. dst_linesize = pic->linesize[act_plane];
  1112. } else {
  1113. dst_linesize = pic->linesize[act_plane] / 2;
  1114. }
  1115. lowpass_height = s->plane[plane].band[4][1].height;
  1116. output_stride = s->plane[plane].band[4][1].a_width;
  1117. lowpass_width = s->plane[plane].band[4][1].width;
  1118. if (s->progressive) {
  1119. dst = (int16_t *)pic->data[act_plane];
  1120. low = s->plane[plane].l_h[8];
  1121. high = s->plane[plane].l_h[9];
  1122. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1123. if (plane & 1)
  1124. dst++;
  1125. if (plane > 1)
  1126. dst += pic->linesize[act_plane] >> 1;
  1127. }
  1128. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  1129. (lowpass_height * 2 > avctx->coded_height / 2 ||
  1130. lowpass_width * 2 > avctx->coded_width / 2 )
  1131. ) {
  1132. ret = AVERROR_INVALIDDATA;
  1133. goto end;
  1134. }
  1135. for (i = 0; i < lowpass_height * 2; i++) {
  1136. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  1137. low += output_stride;
  1138. high += output_stride;
  1139. dst += dst_linesize;
  1140. }
  1141. } else {
  1142. dst = (int16_t *)pic->data[act_plane];
  1143. low = s->plane[plane].l_h[8];
  1144. high = s->plane[plane].l_h[9];
  1145. for (i = 0; i < lowpass_height; i++) {
  1146. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  1147. low += output_stride * 2;
  1148. high += output_stride * 2;
  1149. dst += pic->linesize[act_plane];
  1150. }
  1151. }
  1152. }
  1153. }
  1154. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
  1155. process_bayer(pic, s->bpc);
  1156. end:
  1157. if (ret < 0)
  1158. return ret;
  1159. *got_frame = 1;
  1160. return avpkt->size;
  1161. }
  1162. static av_cold int cfhd_close(AVCodecContext *avctx)
  1163. {
  1164. CFHDContext *s = avctx->priv_data;
  1165. free_buffers(s);
  1166. ff_free_vlc(&s->vlc_9);
  1167. ff_free_vlc(&s->vlc_18);
  1168. return 0;
  1169. }
  1170. #if HAVE_THREADS
  1171. static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
  1172. {
  1173. CFHDContext *psrc = src->priv_data;
  1174. CFHDContext *pdst = dst->priv_data;
  1175. int ret;
  1176. if (dst == src || psrc->transform_type == 0)
  1177. return 0;
  1178. pdst->a_format = psrc->a_format;
  1179. pdst->a_width = psrc->a_width;
  1180. pdst->a_height = psrc->a_height;
  1181. pdst->transform_type = psrc->transform_type;
  1182. pdst->progressive = psrc->progressive;
  1183. pdst->planes = psrc->planes;
  1184. if (!pdst->plane[0].idwt_buf) {
  1185. pdst->coded_width = pdst->a_width;
  1186. pdst->coded_height = pdst->a_height;
  1187. pdst->coded_format = pdst->a_format;
  1188. ret = alloc_buffers(dst);
  1189. if (ret < 0)
  1190. return ret;
  1191. }
  1192. for (int plane = 0; plane < pdst->planes; plane++) {
  1193. memcpy(pdst->plane[plane].band, psrc->plane[plane].band, sizeof(pdst->plane[plane].band));
  1194. memcpy(pdst->plane[plane].idwt_buf, psrc->plane[plane].idwt_buf,
  1195. pdst->plane[plane].idwt_size * sizeof(int16_t));
  1196. }
  1197. return 0;
  1198. }
  1199. #endif
/* Public decoder descriptor registering GoPro CineForm HD with libavcodec. */
AVCodec ff_cfhd_decoder = {
    .name = "cfhd",
    .long_name = NULL_IF_CONFIG_SMALL("GoPro CineForm HD"),
    .type = AVMEDIA_TYPE_VIDEO,
    .id = AV_CODEC_ID_CFHD,
    .priv_data_size = sizeof(CFHDContext),
    .init = cfhd_init,
    .close = cfhd_close,
    .decode = cfhd_decode,
    /* Compiles to NULL when FFmpeg is built without thread support. */
    .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
    /* Direct rendering into caller buffers + frame-level multithreading. */
    .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    /* Init is thread-safe; close is called even if init fails partway. */
    .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};