/*
 * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Cineform HD video decoder
 */

#include "libavutil/attributes.h"
#include "libavutil/buffer.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "get_bits.h"
#include "internal.h"
#include "thread.h"
#include "cfhd.h"

#define ALPHA_COMPAND_DC_OFFSET 256
#define ALPHA_COMPAND_GAIN 9400
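
/*
 * cfhd_init() below builds two small decompanding lookup tables:
 * lut[0] expands the 6-bit magnitudes used by codebook 0 piecewise
 * (values above 40 and above 54 are stretched), and lut[1] applies a
 * cubic expansion to the 8-bit magnitudes of codebook 1.
 * dequant_and_decompand() indexes these tables while decoding the
 * highpass subbands.
 */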
static av_cold int cfhd_init(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    s->avctx = avctx;

    for (int i = 0; i < 64; i++) {
        int val = i;

        if (val >= 40) {
            if (val >= 54) {
                val -= 54;
                val <<= 2;
                val += 54;
            }

            val -= 40;
            val <<= 2;
            val += 40;
        }

        s->lut[0][i] = val;
    }

    for (int i = 0; i < 256; i++)
        s->lut[1][i] = i + ((768LL * i * i * i) / (256 * 256 * 256));

    return ff_cfhd_init_vlcs(s);
}

static void init_plane_defaults(CFHDContext *s)
{
    s->subband_num        = 0;
    s->level              = 0;
    s->subband_num_actual = 0;
}

static void init_peak_table_defaults(CFHDContext *s)
{
    s->peak.level  = 0;
    s->peak.offset = 0;
    memset(&s->peak.base, 0, sizeof(s->peak.base));
}

static void init_frame_defaults(CFHDContext *s)
{
    s->coded_width       = 0;
    s->coded_height      = 0;
    s->coded_format      = AV_PIX_FMT_YUV422P10;
    s->cropped_height    = 0;
    s->bpc               = 10;
    s->channel_cnt       = 3;
    s->subband_cnt       = SUBBAND_COUNT;
    s->channel_num       = 0;
    s->lowpass_precision = 16;
    s->quantisation      = 1;
    s->codebook          = 0;
    s->difference_coding = 0;
    s->frame_type        = 0;
    s->sample_type       = 0;
    if (s->transform_type != 2)
        s->transform_type = -1;
    init_plane_defaults(s);
    init_peak_table_defaults(s);
}
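
/*
 * Highpass coefficients are stored companded and quantised: for
 * codebooks 0 and 1 the magnitude is expanded through the tables built
 * in cfhd_init(), the sign is restored and the result is scaled by the
 * per-band quantiser; any other codebook is a plain quantised value.
 */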
static inline int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
{
    if (codebook == 0 || codebook == 1) {
        return s->lut[codebook][abs(level)] * FFSIGN(level) * quantisation;
    } else
        return level * quantisation;
}

static inline void difference_coding(int16_t *band, int width, int height)
{
    int i, j;

    for (i = 0; i < height; i++) {
        for (j = 1; j < width; j++) {
            band[j] += band[j-1];
        }
        band += width;
    }
}

static inline void peak_table(int16_t *band, Peak *peak, int length)
{
    int i;
    for (i = 0; i < length; i++)
        if (abs(band[i]) > peak->level)
            band[i] = bytestream2_get_le16(&peak->base);
}
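
/* Undo the alpha companding applied by the encoder: recentre around the
 * DC offset, apply the fixed gain and clip back to a 12-bit range. */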
static inline void process_alpha(int16_t *alpha, int width)
{
    int i, channel;
    for (i = 0; i < width; i++) {
        channel  = alpha[i];
        channel -= ALPHA_COMPAND_DC_OFFSET;
        channel <<= 3;
        channel *= ALPHA_COMPAND_GAIN;
        channel >>= 16;
        channel = av_clip_uintp2(channel, 12);
        alpha[i] = channel;
    }
}
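
/*
 * For Bayer content the four decoded channels hold, at the four mosaic
 * positions of each 2x2 cell, a green value and R-G, B-G and G1-G2
 * differences. process_bayer() converts them back into the R, G1, G2
 * and B samples of the RGGB16 mosaic and rescales them to 16 bits.
 */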
static inline void process_bayer(AVFrame *frame, int bpc)
{
    const int linesize = frame->linesize[0];
    uint16_t *r  = (uint16_t *)frame->data[0];
    uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);
    uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]);
    uint16_t *b  = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
    const int mid = 1 << (bpc - 1);
    const int factor = 1 << (16 - bpc);

    for (int y = 0; y < frame->height >> 1; y++) {
        for (int x = 0; x < frame->width; x += 2) {
            int R, G1, G2, B;
            int g, rg, bg, gd;

            g  = r[x];
            rg = g1[x];
            bg = g2[x];
            gd = b[x];
            gd -= mid;

            R  = (rg - mid) * 2 + g;
            G1 = g + gd;
            G2 = g - gd;
            B  = (bg - mid) * 2 + g;

            R  = av_clip_uintp2(R  * factor, 16);
            G1 = av_clip_uintp2(G1 * factor, 16);
            G2 = av_clip_uintp2(G2 * factor, 16);
            B  = av_clip_uintp2(B  * factor, 16);

            r[x]  = R;
            g1[x] = G1;
            g2[x] = G2;
            b[x]  = B;
        }

        r  += linesize;
        g1 += linesize;
        g2 += linesize;
        b  += linesize;
    }
}

static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
                                              int width, int linesize, int plane)
{
    int i;
    int16_t even, odd;

    for (i = 0; i < width; i++) {
        even = (low[i] - high[i]) / 2;
        odd  = (low[i] + high[i]) / 2;
        output[i]            = av_clip_uintp2(even, 10);
        output[i + linesize] = av_clip_uintp2(odd, 10);
    }
}

static inline void inverse_temporal_filter(int16_t *low, int16_t *high, int width)
{
    for (int i = 0; i < width; i++) {
        int even = (low[i] - high[i]) / 2;
        int odd  = (low[i] + high[i]) / 2;

        low[i]  = even;
        high[i] = odd;
    }
}

static void free_buffers(CFHDContext *s)
{
    int i, j;

    for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
        av_freep(&s->plane[i].idwt_buf);
        av_freep(&s->plane[i].idwt_tmp);
        s->plane[i].idwt_size = 0;

        for (j = 0; j < SUBBAND_COUNT_3D; j++)
            s->plane[i].subband[j] = NULL;

        for (j = 0; j < 10; j++)
            s->plane[i].l_h[j] = NULL;
    }
    s->a_height = 0;
    s->a_width  = 0;
}
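
/*
 * alloc_buffers() sizes one inverse-DWT buffer per plane and lays out
 * the subband pointers inside it: bands 0-3 at one eighth resolution,
 * bands 4-6 at one quarter resolution, and either bands 7-9 (spatial
 * transform) or bands 7-16 covering the two temporal frames of the 3D
 * transform at half resolution. idwt_tmp provides the matching scratch
 * area referenced through l_h[].
 */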
static int alloc_buffers(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;
    int i, j, ret, planes, bayer = 0;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
        return ret;
    avctx->pix_fmt = s->coded_format;

    ff_cfhddsp_init(&s->dsp, s->bpc, avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);
    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        planes         = 4;
        chroma_x_shift = 1;
        chroma_y_shift = 1;
        bayer          = 1;
    }

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        int width  = (i || bayer) ? s->coded_width  >> chroma_x_shift : s->coded_width;
        int height = (i || bayer) ? s->coded_height >> chroma_y_shift : s->coded_height;
        ptrdiff_t stride = (FFALIGN(width / 8, 8) + 64) * 8;
        if (chroma_y_shift && !bayer)
            height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width  = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        w8 = FFALIGN(s->plane[i].width / 8, 8) + 64;
        h8 = FFALIGN(height, 8) / 8;
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        if (s->transform_type == 0) {
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        } else {
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride * 2;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        }

        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        if (s->transform_type == 0) {
            s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
        } else {
            int16_t *frame2 =
            s->plane[i].subband[7]  = s->plane[i].idwt_buf + 4 * w2 * h2;
            s->plane[i].subband[8]  = frame2 + 2 * w4 * h4;
            s->plane[i].subband[9]  = frame2 + 1 * w4 * h4;
            s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
            s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
            s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
            s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
            s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
        }

        if (s->transform_type == 0) {
            for (j = 0; j < DWT_LEVELS; j++) {
                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width  = w8 << j;
                    s->plane[i].band[j][k].a_height = h8 << j;
                }
            }
        } else {
            for (j = 0; j < DWT_LEVELS_3D; j++) {
                int t = j < 1 ? 0 : (j < 3 ? 1 : 2);
                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width  = w8 << t;
                    s->plane[i].band[j][k].a_height = h8 << t;
                }
            }
        }

        /* ll2 and ll1 commented out because they are done in-place */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
        if (s->transform_type != 0) {
            int16_t *frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;

            s->plane[i].l_h[8] = frame2;
            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
        }
    }

    s->a_height = s->coded_height;
    s->a_width  = s->coded_width;
    s->a_format = s->coded_format;

    return 0;
}
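
/*
 * cfhd_decode() runs in two phases: the bitstream is a sequence of
 * 16-bit tag / 16-bit value pairs, parsed below into per-plane and
 * per-band state, with the lowpass and highpass coefficient payloads
 * read inline; once all channels are present, the inverse wavelet
 * transforms reconstruct the output frame.
 */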
static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                       AVPacket *avpkt)
{
    CFHDContext *s = avctx->priv_data;
    CFHDDSPContext *dsp = &s->dsp;
    GetByteContext gb;
    ThreadFrame frame = { .f = data };
    AVFrame *pic = data;
    int ret = 0, i, j, plane, got_buffer = 0;
    int16_t *coeff_data;

    init_frame_defaults(s);
    s->planes = av_pix_fmt_count_planes(s->coded_format);

    bytestream2_init(&gb, avpkt->data, avpkt->size);

    while (bytestream2_get_bytes_left(&gb) >= 4) {
        /* Bit weird but implement the tag parsing as the spec says */
        uint16_t tagu   = bytestream2_get_be16(&gb);
        int16_t tag     = (int16_t)tagu;
        int8_t tag8     = (int8_t)(tagu >> 8);
        uint16_t abstag = abs(tag);
        int8_t abs_tag8 = abs(tag8);
        uint16_t data   = bytestream2_get_be16(&gb);
        if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
            av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
        } else if (tag == SampleFlags) {
            av_log(avctx, AV_LOG_DEBUG, "Progressive? %"PRIu16"\n", data);
            s->progressive = data & 0x0001;
        } else if (tag == FrameType) {
            s->frame_type = data;
            av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
        } else if (abstag == VersionMajor) {
            av_log(avctx, AV_LOG_DEBUG, "Version major %"PRIu16"\n", data);
        } else if (abstag == VersionMinor) {
            av_log(avctx, AV_LOG_DEBUG, "Version minor %"PRIu16"\n", data);
        } else if (abstag == VersionRevision) {
            av_log(avctx, AV_LOG_DEBUG, "Version revision %"PRIu16"\n", data);
        } else if (abstag == VersionEdit) {
            av_log(avctx, AV_LOG_DEBUG, "Version edit %"PRIu16"\n", data);
        } else if (abstag == Version) {
            av_log(avctx, AV_LOG_DEBUG, "Version %"PRIu16"\n", data);
        } else if (tag == ImageWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
            s->coded_width = data;
        } else if (tag == ImageHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
            s->coded_height = data;
        } else if (tag == ChannelCount) {
            av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
            s->channel_cnt = data;
            if (data > 4) {
                av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
                ret = AVERROR_PATCHWELCOME;
                goto end;
            }
        } else if (tag == SubbandCount) {
            av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
            if (data != SUBBAND_COUNT && data != SUBBAND_COUNT_3D) {
                av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
                ret = AVERROR_PATCHWELCOME;
                goto end;
            }
        } else if (tag == ChannelNumber) {
            s->channel_num = data;
            av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
            if (s->channel_num >= s->planes) {
                av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            init_plane_defaults(s);
        } else if (tag == SubbandNumber) {
            if (s->subband_num != 0 && data == 1 && (s->transform_type == 0 || s->transform_type == 2))  // hack
                s->level++;
            av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
            s->subband_num = data;
            if ((s->transform_type == 0 && s->level >= DWT_LEVELS) ||
                (s->transform_type == 2 && s->level >= DWT_LEVELS_3D)) {
                av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            if (s->subband_num > 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
        } else if (tag == SubbandBand) {
            av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
            if ((s->transform_type == 0 && data >= SUBBAND_COUNT) ||
                (s->transform_type == 2 && data >= SUBBAND_COUNT_3D && data != 255)) {
                av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            if (s->transform_type == 0 || s->transform_type == 2)
                s->subband_num_actual = data;
            else
                av_log(avctx, AV_LOG_WARNING, "Ignoring subband num actual %"PRIu16"\n", data);
        } else if (tag == LowpassPrecision)
            av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
        else if (tag == Quantization) {
            s->quantisation = data;
            av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
        } else if (tag == PrescaleTable) {
            for (i = 0; i < 8; i++)
                s->prescale_table[i] = (data >> (14 - i * 2)) & 0x3;
            av_log(avctx, AV_LOG_DEBUG, "Prescale table: %x\n", data);
        } else if (tag == BandEncoding) {
            if (!data || data > 5) {
                av_log(avctx, AV_LOG_ERROR, "Invalid band encoding\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            s->band_encoding = data;
            av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d : %x\n", s->subband_num_actual, data);
        } else if (tag == LowpassWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
            s->plane[s->channel_num].band[0][0].width  = data;
            s->plane[s->channel_num].band[0][0].stride = data;
        } else if (tag == LowpassHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
            s->plane[s->channel_num].band[0][0].height = data;
        } else if (tag == SampleType) {
            s->sample_type = data;
            av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
        } else if (tag == TransformType) {
            if (data > 2) {
                av_log(avctx, AV_LOG_ERROR, "Invalid transform type\n");
                ret = AVERROR(EINVAL);
                goto end;
            } else if (data == 1) {
                av_log(avctx, AV_LOG_ERROR, "unsupported transform type\n");
                ret = AVERROR_PATCHWELCOME;
                goto end;
            }
            if (s->transform_type == -1) {
                s->transform_type = data;
                av_log(avctx, AV_LOG_DEBUG, "Transform type %"PRIu16"\n", data);
            } else {
                av_log(avctx, AV_LOG_DEBUG, "Ignoring additional transform type %"PRIu16"\n", data);
            }
        } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
            if (abstag == 0x4001)
                s->peak.level = 0;
            av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
            bytestream2_skipu(&gb, data * 4);
        } else if (tag == FrameIndex) {
            av_log(avctx, AV_LOG_DEBUG, "Frame index %"PRIu16"\n", data);
            s->frame_index = data;
        } else if (tag == SampleIndexTable) {
            av_log(avctx, AV_LOG_DEBUG, "Sample index table - skipping %i values\n", data);
            if (data > bytestream2_get_bytes_left(&gb) / 4) {
                av_log(avctx, AV_LOG_ERROR, "too many values (%d)\n", data);
                ret = AVERROR_INVALIDDATA;
                goto end;
            }
            for (i = 0; i < data; i++) {
                uint32_t offset = bytestream2_get_be32(&gb);
                av_log(avctx, AV_LOG_DEBUG, "Offset = %"PRIu32"\n", offset);
            }
        } else if (tag == HighpassWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].width  = data;
            s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        } else if (tag == HighpassHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        } else if (tag == BandWidth) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].width  = data;
            s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
        } else if (tag == BandHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
            if (data < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
        } else if (tag == InputFormat) {
            av_log(avctx, AV_LOG_DEBUG, "Input format %i\n", data);
            if (s->coded_format == AV_PIX_FMT_NONE ||
                s->coded_format == AV_PIX_FMT_YUV422P10) {
                if (data >= 100 && data <= 105) {
                    s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
                } else if (data >= 122 && data <= 128) {
                    s->coded_format = AV_PIX_FMT_GBRP12;
                } else if (data == 30) {
                    s->coded_format = AV_PIX_FMT_GBRAP12;
                } else {
                    s->coded_format = AV_PIX_FMT_YUV422P10;
                }
                s->planes = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 4 : av_pix_fmt_count_planes(s->coded_format);
            }
        } else if (tag == BandCodingFlags) {
            s->codebook = data & 0xf;
            s->difference_coding = (data >> 4) & 1;
            av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
        } else if (tag == Precision) {
            av_log(avctx, AV_LOG_DEBUG, "Precision %i\n", data);
            if (!(data == 10 || data == 12)) {
                av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            avctx->bits_per_raw_sample = s->bpc = data;
        } else if (tag == EncodedFormat) {
            av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
            if (data == 1) {
                s->coded_format = AV_PIX_FMT_YUV422P10;
            } else if (data == 2) {
                s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
            } else if (data == 3) {
                s->coded_format = AV_PIX_FMT_GBRP12;
            } else if (data == 4) {
                s->coded_format = AV_PIX_FMT_GBRAP12;
            } else {
                avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
                ret = AVERROR_PATCHWELCOME;
                goto end;
            }
            s->planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
        } else if (tag == -DisplayHeight) {
            av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
            s->cropped_height = data;
        } else if (tag == -PeakOffsetLow) {
            s->peak.offset &= ~0xffff;
            s->peak.offset |= (data & 0xffff);
            s->peak.base    = gb;
            s->peak.level   = 0;
        } else if (tag == -PeakOffsetHigh) {
            s->peak.offset &= 0xffff;
            s->peak.offset |= (data & 0xffffU) << 16;
            s->peak.base    = gb;
            s->peak.level   = 0;
        } else if (tag == -PeakLevel && s->peak.offset) {
            s->peak.level = data;
            if (s->peak.offset < 4 - bytestream2_tell(&s->peak.base) ||
                s->peak.offset > 4 + bytestream2_get_bytes_left(&s->peak.base)
            ) {
                ret = AVERROR_INVALIDDATA;
                goto end;
            }
            bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
        } else
            av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);

        if (tag == BitstreamMarker && data == 0xf0f &&
            s->coded_format != AV_PIX_FMT_NONE) {
            int lowpass_height = s->plane[s->channel_num].band[0][0].height;
            int lowpass_width  = s->plane[s->channel_num].band[0][0].width;
            int factor = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 2 : 1;

            if (s->coded_width) {
                s->coded_width *= factor;
            }

            if (s->coded_height) {
                s->coded_height *= factor;
            }

            if (!s->a_width && !s->coded_width) {
                s->coded_width = lowpass_width * factor * 8;
            }

            if (!s->a_height && !s->coded_height) {
                s->coded_height = lowpass_height * factor * 8;
            }

            if (s->a_width && !s->coded_width)
                s->coded_width = s->a_width;
            if (s->a_height && !s->coded_height)
                s->coded_height = s->a_height;

            if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
                s->a_format != s->coded_format) {
                free_buffers(s);
                if ((ret = alloc_buffers(avctx)) < 0) {
                    free_buffers(s);
                    return ret;
                }
            }
            ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
            if (ret < 0)
                return ret;
            if (s->cropped_height) {
                unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
                if (avctx->height < height)
                    return AVERROR_INVALIDDATA;
                avctx->height = height;
            }
            frame.f->width =
            frame.f->height = 0;

            if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
                return ret;

            s->coded_width = 0;
            s->coded_height = 0;
            s->coded_format = AV_PIX_FMT_NONE;
            got_buffer = 1;
        } else if (tag == FrameIndex && data == 1 && s->sample_type == 1 && s->frame_type == 2) {
            frame.f->width =
            frame.f->height = 0;

            if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
                return ret;
            s->coded_width = 0;
            s->coded_height = 0;
            s->coded_format = AV_PIX_FMT_NONE;
            got_buffer = 1;
        }

        if (s->subband_num_actual == 255)
            goto finish;
        coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];

        /* Lowpass coefficients */
        if (tag == BitstreamMarker && data == 0xf0f && s->a_width && s->a_height) {
            int lowpass_height   = s->plane[s->channel_num].band[0][0].height;
            int lowpass_width    = s->plane[s->channel_num].band[0][0].width;
            int lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
            int lowpass_a_width  = s->plane[s->channel_num].band[0][0].a_width;

            if (lowpass_width < 3 ||
                lowpass_width > lowpass_a_width) {
                av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (lowpass_height < 3 ||
                lowpass_height > lowpass_a_height) {
                av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (!got_buffer) {
                av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
                lowpass_width * lowpass_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
                av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
            for (i = 0; i < lowpass_height; i++) {
                for (j = 0; j < lowpass_width; j++)
                    coeff_data[j] = bytestream2_get_be16u(&gb);

                coeff_data += lowpass_width;
            }

            /* Align to mod-4 position to continue reading tags */
            bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);

            /* Copy last line of coefficients if odd height */
            if (lowpass_height & 1) {
                memcpy(&coeff_data[lowpass_height * lowpass_width],
                       &coeff_data[(lowpass_height - 1) * lowpass_width],
                       lowpass_width * sizeof(*coeff_data));
            }

            av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
        }

        if ((tag == BandHeader || tag == BandSecondPass) && s->subband_num_actual != 255 && s->a_width && s->a_height) {
            int highpass_height   = s->plane[s->channel_num].band[s->level][s->subband_num].height;
            int highpass_width    = s->plane[s->channel_num].band[s->level][s->subband_num].width;
            int highpass_a_width  = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
            int highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
            int highpass_stride   = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
            int expected;
            int a_expected = highpass_a_height * highpass_a_width;
            int level, run, coeff;
            int count = 0, bytes;

            if (!got_buffer) {
                av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
                av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            expected = highpass_height * highpass_stride;

            av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);

            ret = init_get_bits8(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb));
            if (ret < 0)
                goto end;
            {
                OPEN_READER(re, &s->gb);

                const int lossless = s->band_encoding == 5;

                if (s->codebook == 0 && s->transform_type == 2 && s->subband_num_actual == 7)
                    s->codebook = 1;
                if (!s->codebook) {
                    while (1) {
                        UPDATE_CACHE(re, &s->gb);
                        GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
                                   VLC_BITS, 3, 1);

                        /* escape */
                        if (level == 64)
                            break;

                        count += run;

                        if (count > expected)
                            break;

                        if (!lossless)
                            coeff = dequant_and_decompand(s, level, s->quantisation, 0);
                        else
                            coeff = level;
                        if (tag == BandSecondPass) {
                            const uint16_t q = s->quantisation;

                            for (i = 0; i < run; i++) {
                                *coeff_data |= coeff * 256;
                                *coeff_data++ *= q;
                            }
                        } else {
                            for (i = 0; i < run; i++)
                                *coeff_data++ = coeff;
                        }
                    }
                } else {
                    while (1) {
                        UPDATE_CACHE(re, &s->gb);
                        GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
                                   VLC_BITS, 3, 1);

                        /* escape */
                        if (level == 255 && run == 2)
                            break;

                        count += run;

                        if (count > expected)
                            break;

                        if (!lossless)
                            coeff = dequant_and_decompand(s, level, s->quantisation, s->codebook);
                        else
                            coeff = level;
                        if (tag == BandSecondPass) {
                            const uint16_t q = s->quantisation;

                            for (i = 0; i < run; i++) {
                                *coeff_data |= coeff * 256;
                                *coeff_data++ *= q;
                            }
                        } else {
                            for (i = 0; i < run; i++)
                                *coeff_data++ = coeff;
                        }
                    }
                }
                CLOSE_READER(re, &s->gb);
            }

            if (count > expected) {
                av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
                ret = AVERROR(EINVAL);
                goto end;
            }
            if (s->peak.level)
                peak_table(coeff_data - count, &s->peak, count);
            if (s->difference_coding)
                difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);

            bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
            if (bytes > bytestream2_get_bytes_left(&gb)) {
                av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
                ret = AVERROR(EINVAL);
                goto end;
            } else
                bytestream2_seek(&gb, bytes, SEEK_CUR);

            av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);

finish:
            if (s->subband_num_actual != 255)
                s->codebook = 0;
        }
    }

    s->planes = av_pix_fmt_count_planes(avctx->pix_fmt);
    if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
        s->progressive = 1;
        s->planes = 4;
    }

    ff_thread_finish_setup(avctx);

    if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
        s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
        ret = AVERROR(EINVAL);
        goto end;
    }

    if (!got_buffer) {
        av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
        ret = AVERROR(EINVAL);
        goto end;
    }
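
    /*
     * Spatial (2D) transform: three inverse wavelet levels per plane.
     * The first two levels each run a pair of vertical filters followed
     * by a horizontal recombination; the third level writes into the
     * output picture, either progressive rows (horiz_filter_clip) or
     * interleaved fields for interlaced content.
     */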
    if (s->transform_type == 0 && s->sample_type != 1) {
        for (plane = 0; plane < s->planes && !ret; plane++) {
            /* level 1 */
            int lowpass_height  = s->plane[plane].band[0][0].height;
            int output_stride   = s->plane[plane].band[0][0].a_width;
            int lowpass_width   = s->plane[plane].band[0][0].width;
            int highpass_stride = s->plane[plane].band[0][1].stride;
            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
            ptrdiff_t dst_linesize;
            int16_t *low, *high, *output, *dst;

            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                act_plane = 0;
                dst_linesize = pic->linesize[act_plane];
            } else {
                dst_linesize = pic->linesize[act_plane] / 2;
            }

            if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
                !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
                lowpass_width < 3 || lowpass_height < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

            low    = s->plane[plane].subband[0];
            high   = s->plane[plane].subband[2];
            output = s->plane[plane].l_h[0];
            dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].subband[1];
            high   = s->plane[plane].subband[3];
            output = s->plane[plane].l_h[1];
            dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].l_h[0];
            high   = s->plane[plane].l_h[1];
            output = s->plane[plane].subband[0];
            dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
            if (s->bpc == 12) {
                output = s->plane[plane].subband[0];
                for (i = 0; i < lowpass_height * 2; i++) {
                    for (j = 0; j < lowpass_width * 2; j++)
                        output[j] *= 4;

                    output += output_stride * 2;
                }
            }

            /* level 2 */
            lowpass_height  = s->plane[plane].band[1][1].height;
            output_stride   = s->plane[plane].band[1][1].a_width;
            lowpass_width   = s->plane[plane].band[1][1].width;
            highpass_stride = s->plane[plane].band[1][1].stride;

            if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
                !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
                lowpass_width < 3 || lowpass_height < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

            low    = s->plane[plane].subband[0];
            high   = s->plane[plane].subband[5];
            output = s->plane[plane].l_h[3];
            dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].subband[4];
            high   = s->plane[plane].subband[6];
            output = s->plane[plane].l_h[4];
            dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].l_h[3];
            high   = s->plane[plane].l_h[4];
            output = s->plane[plane].subband[0];
            dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);

            output = s->plane[plane].subband[0];
            for (i = 0; i < lowpass_height * 2; i++) {
                for (j = 0; j < lowpass_width * 2; j++)
                    output[j] *= 4;

                output += output_stride * 2;
            }

            /* level 3 */
            lowpass_height  = s->plane[plane].band[2][1].height;
            output_stride   = s->plane[plane].band[2][1].a_width;
            lowpass_width   = s->plane[plane].band[2][1].width;
            highpass_stride = s->plane[plane].band[2][1].stride;

            if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
                !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width ||
                lowpass_height < 3 || lowpass_width < 3 || lowpass_width * 2 > s->plane[plane].width) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
            if (s->progressive) {
                low    = s->plane[plane].subband[0];
                high   = s->plane[plane].subband[8];
                output = s->plane[plane].l_h[6];
                dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].subband[7];
                high   = s->plane[plane].subband[9];
                output = s->plane[plane].l_h[7];
                dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

                dst = (int16_t *)pic->data[act_plane];
                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                    if (plane & 1)
                        dst++;
                    if (plane > 1)
                        dst += pic->linesize[act_plane] >> 1;
                }
                low  = s->plane[plane].l_h[6];
                high = s->plane[plane].l_h[7];

                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
                    (lowpass_height * 2 > avctx->coded_height / 2 ||
                     lowpass_width  * 2 > avctx->coded_width  / 2)
                    ) {
                    ret = AVERROR_INVALIDDATA;
                    goto end;
                }

                for (i = 0; i < s->plane[act_plane].height; i++) {
                    dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                    if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
                        process_alpha(dst, lowpass_width * 2);
                    low  += output_stride;
                    high += output_stride;
                    dst  += dst_linesize;
                }
            } else {
                av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
                pic->interlaced_frame = 1;
                low    = s->plane[plane].subband[0];
                high   = s->plane[plane].subband[7];
                output = s->plane[plane].l_h[6];
                dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].subband[8];
                high   = s->plane[plane].subband[9];
                output = s->plane[plane].l_h[7];
                dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

                dst  = (int16_t *)pic->data[act_plane];
                low  = s->plane[plane].l_h[6];
                high = s->plane[plane].l_h[7];
                for (i = 0; i < s->plane[act_plane].height / 2; i++) {
                    interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
                    low  += output_stride * 2;
                    high += output_stride * 2;
                    dst  += pic->linesize[act_plane];
                }
            }
        }
    } else if (s->transform_type == 2 && (avctx->internal->is_copy || s->frame_index == 1 || s->sample_type != 1)) {
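        /*
         * Spatio-temporal (3D) transform: the two spatial levels are
         * inverted as above, then inverse_temporal_filter() splits the
         * result into the two temporal frames before the final spatial
         * level produces the output rows.
         */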
        for (plane = 0; plane < s->planes && !ret; plane++) {
            int lowpass_height  = s->plane[plane].band[0][0].height;
            int output_stride   = s->plane[plane].band[0][0].a_width;
            int lowpass_width   = s->plane[plane].band[0][0].width;
            int highpass_stride = s->plane[plane].band[0][1].stride;
            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
            int16_t *low, *high, *output, *dst;
            ptrdiff_t dst_linesize;

            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                act_plane = 0;
                dst_linesize = pic->linesize[act_plane];
            } else {
                dst_linesize = pic->linesize[act_plane] / 2;
            }

            if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
                !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
                lowpass_width < 3 || lowpass_height < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

            low    = s->plane[plane].subband[0];
            high   = s->plane[plane].subband[2];
            output = s->plane[plane].l_h[0];
            dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].subband[1];
            high   = s->plane[plane].subband[3];
            output = s->plane[plane].l_h[1];
            dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].l_h[0];
            high   = s->plane[plane].l_h[1];
            output = s->plane[plane].l_h[7];
            dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
            if (s->bpc == 12) {
                output = s->plane[plane].l_h[7];
                for (i = 0; i < lowpass_height * 2; i++) {
                    for (j = 0; j < lowpass_width * 2; j++)
                        output[j] *= 4;

                    output += output_stride * 2;
                }
            }

            lowpass_height  = s->plane[plane].band[1][1].height;
            output_stride   = s->plane[plane].band[1][1].a_width;
            lowpass_width   = s->plane[plane].band[1][1].width;
            highpass_stride = s->plane[plane].band[1][1].stride;

            if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
                !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
                lowpass_width < 3 || lowpass_height < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

            low    = s->plane[plane].l_h[7];
            high   = s->plane[plane].subband[5];
            output = s->plane[plane].l_h[3];
            dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].subband[4];
            high   = s->plane[plane].subband[6];
            output = s->plane[plane].l_h[4];
            dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].l_h[3];
            high   = s->plane[plane].l_h[4];
            output = s->plane[plane].l_h[7];
            dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);

            output = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_height * 2; i++) {
                for (j = 0; j < lowpass_width * 2; j++)
                    output[j] *= 4;
                output += output_stride * 2;
            }

            low    = s->plane[plane].subband[7];
            high   = s->plane[plane].subband[9];
            output = s->plane[plane].l_h[3];
            dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].subband[8];
            high   = s->plane[plane].subband[10];
            output = s->plane[plane].l_h[4];
            dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

            low    = s->plane[plane].l_h[3];
            high   = s->plane[plane].l_h[4];
            output = s->plane[plane].l_h[9];
            dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);

            lowpass_height  = s->plane[plane].band[4][1].height;
            output_stride   = s->plane[plane].band[4][1].a_width;
            lowpass_width   = s->plane[plane].band[4][1].width;
            highpass_stride = s->plane[plane].band[4][1].stride;
            av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);

            if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
                !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
                lowpass_width < 3 || lowpass_height < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            low    = s->plane[plane].l_h[7];
            high   = s->plane[plane].l_h[9];
            output = s->plane[plane].l_h[7];
            for (i = 0; i < lowpass_height; i++) {
                inverse_temporal_filter(low, high, lowpass_width);
                low  += output_stride;
                high += output_stride;
            }
            if (s->progressive) {
                low    = s->plane[plane].l_h[7];
                high   = s->plane[plane].subband[15];
                output = s->plane[plane].l_h[6];
                dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].subband[14];
                high   = s->plane[plane].subband[16];
                output = s->plane[plane].l_h[7];
                dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].l_h[9];
                high   = s->plane[plane].subband[12];
                output = s->plane[plane].l_h[8];
                dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].subband[11];
                high   = s->plane[plane].subband[13];
                output = s->plane[plane].l_h[9];
                dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

                if (s->sample_type == 1)
                    continue;

                dst = (int16_t *)pic->data[act_plane];
                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                    if (plane & 1)
                        dst++;
                    if (plane > 1)
                        dst += pic->linesize[act_plane] >> 1;
                }

                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
                    (lowpass_height * 2 > avctx->coded_height / 2 ||
                     lowpass_width  * 2 > avctx->coded_width  / 2)
                    ) {
                    ret = AVERROR_INVALIDDATA;
                    goto end;
                }

                low  = s->plane[plane].l_h[6];
                high = s->plane[plane].l_h[7];
                for (i = 0; i < s->plane[act_plane].height; i++) {
                    dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                    low  += output_stride;
                    high += output_stride;
                    dst  += dst_linesize;
                }
            } else {
                pic->interlaced_frame = 1;
                low    = s->plane[plane].l_h[7];
                high   = s->plane[plane].subband[14];
                output = s->plane[plane].l_h[6];
                dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].subband[15];
                high   = s->plane[plane].subband[16];
                output = s->plane[plane].l_h[7];
                dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].l_h[9];
                high   = s->plane[plane].subband[11];
                output = s->plane[plane].l_h[8];
                dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);

                low    = s->plane[plane].subband[12];
                high   = s->plane[plane].subband[13];
                output = s->plane[plane].l_h[9];
                dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);

                if (s->sample_type == 1)
                    continue;

                dst  = (int16_t *)pic->data[act_plane];
                low  = s->plane[plane].l_h[6];
                high = s->plane[plane].l_h[7];
                for (i = 0; i < s->plane[act_plane].height / 2; i++) {
                    interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
                    low  += output_stride * 2;
                    high += output_stride * 2;
                    dst  += pic->linesize[act_plane];
                }
            }
        }
    }
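
    /*
     * For transform_type 2 with sample_type 1 the second temporal frame
     * was reconstructed into l_h[8]/l_h[9] above; this pass only runs
     * the final horizontal (or interlaced) recombination to emit it.
     */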
    if (s->transform_type == 2 && s->sample_type == 1) {
        int16_t *low, *high, *dst;
        int output_stride, lowpass_height, lowpass_width;
        ptrdiff_t dst_linesize;

        for (plane = 0; plane < s->planes; plane++) {
            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;

            if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                act_plane = 0;
                dst_linesize = pic->linesize[act_plane];
            } else {
                dst_linesize = pic->linesize[act_plane] / 2;
            }

            lowpass_height = s->plane[plane].band[4][1].height;
            output_stride  = s->plane[plane].band[4][1].a_width;
            lowpass_width  = s->plane[plane].band[4][1].width;

            if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
                s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
                lowpass_width < 3 || lowpass_height < 3) {
                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
                ret = AVERROR(EINVAL);
                goto end;
            }

            if (s->progressive) {
                dst  = (int16_t *)pic->data[act_plane];
                low  = s->plane[plane].l_h[8];
                high = s->plane[plane].l_h[9];

                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
                    if (plane & 1)
                        dst++;
                    if (plane > 1)
                        dst += pic->linesize[act_plane] >> 1;
                }

                if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
                    (lowpass_height * 2 > avctx->coded_height / 2 ||
                     lowpass_width  * 2 > avctx->coded_width  / 2)
                    ) {
                    ret = AVERROR_INVALIDDATA;
                    goto end;
                }

                for (i = 0; i < s->plane[act_plane].height; i++) {
                    dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                    low  += output_stride;
                    high += output_stride;
                    dst  += dst_linesize;
                }
            } else {
                dst  = (int16_t *)pic->data[act_plane];
                low  = s->plane[plane].l_h[8];
                high = s->plane[plane].l_h[9];
                for (i = 0; i < s->plane[act_plane].height / 2; i++) {
                    interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
                    low  += output_stride * 2;
                    high += output_stride * 2;
                    dst  += pic->linesize[act_plane];
                }
            }
        }
    }

    if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
        process_bayer(pic, s->bpc);
end:
    if (ret < 0)
        return ret;

    *got_frame = 1;
    return avpkt->size;
}

static av_cold int cfhd_close(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    free_buffers(s);

    ff_free_vlc(&s->vlc_9);
    ff_free_vlc(&s->vlc_18);

    return 0;
}
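
/*
 * Frame threading: the 3D transform carries reconstructed subband data
 * from one packet to the next, so update_thread_context() copies the
 * band state and idwt buffers into the destination thread context,
 * (re)allocating its buffers when the cached dimensions or format
 * changed. For the purely spatial transform nothing needs to be shared.
 */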
#if HAVE_THREADS
static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
{
    CFHDContext *psrc = src->priv_data;
    CFHDContext *pdst = dst->priv_data;
    int ret;

    if (dst == src || psrc->transform_type == 0)
        return 0;

    if (pdst->plane[0].idwt_size != psrc->plane[0].idwt_size ||
        pdst->a_format != psrc->a_format ||
        pdst->a_width  != psrc->a_width  ||
        pdst->a_height != psrc->a_height)
        free_buffers(pdst);

    pdst->a_format       = psrc->a_format;
    pdst->a_width        = psrc->a_width;
    pdst->a_height       = psrc->a_height;
    pdst->transform_type = psrc->transform_type;
    pdst->progressive    = psrc->progressive;
    pdst->planes         = psrc->planes;

    if (!pdst->plane[0].idwt_buf) {
        pdst->coded_width  = pdst->a_width;
        pdst->coded_height = pdst->a_height;
        pdst->coded_format = pdst->a_format;
        ret = alloc_buffers(dst);
        if (ret < 0)
            return ret;
    }

    for (int plane = 0; plane < pdst->planes; plane++) {
        memcpy(pdst->plane[plane].band, psrc->plane[plane].band, sizeof(pdst->plane[plane].band));
        memcpy(pdst->plane[plane].idwt_buf, psrc->plane[plane].idwt_buf,
               pdst->plane[plane].idwt_size * sizeof(int16_t));
    }

    return 0;
}
#endif

AVCodec ff_cfhd_decoder = {
    .name                  = "cfhd",
    .long_name             = NULL_IF_CONFIG_SMALL("GoPro CineForm HD"),
    .type                  = AVMEDIA_TYPE_VIDEO,
    .id                    = AV_CODEC_ID_CFHD,
    .priv_data_size        = sizeof(CFHDContext),
    .init                  = cfhd_init,
    .close                 = cfhd_close,
    .decode                = cfhd_decode,
    .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
    .capabilities          = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal         = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};