/*
 * Copyright (c) 2015-2016 Kieran Kunhya <kieran@kunhya.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Cineform HD video decoder
 */
#include "libavutil/attributes.h"
#include "libavutil/buffer.h"
#include "libavutil/common.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

#include "avcodec.h"
#include "bytestream.h"
#include "get_bits.h"
#include "internal.h"
#include "thread.h"
#include "cfhd.h"

#define ALPHA_COMPAND_DC_OFFSET 256
#define ALPHA_COMPAND_GAIN 9400
static av_cold int cfhd_init(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    s->avctx = avctx;

    /* Build the decompanding look-up tables used by dequant_and_decompand()
     * for codebooks 0 and 1. */
    for (int i = 0; i < 64; i++) {
        int val = i;

        if (val >= 40) {
            if (val >= 54) {
                val -= 54;
                val <<= 2;
                val += 54;
            }

            val -= 40;
            val <<= 2;
            val += 40;
        }

        s->lut[0][i] = val;
    }

    for (int i = 0; i < 256; i++)
        s->lut[1][i] = i + ((768LL * i * i * i) / (256 * 256 * 256));

    return ff_cfhd_init_vlcs(s);
}
static void init_plane_defaults(CFHDContext *s)
{
    s->subband_num        = 0;
    s->level              = 0;
    s->subband_num_actual = 0;
}

static void init_peak_table_defaults(CFHDContext *s)
{
    s->peak.level  = 0;
    s->peak.offset = 0;
    memset(&s->peak.base, 0, sizeof(s->peak.base));
}

static void init_frame_defaults(CFHDContext *s)
{
    s->coded_width       = 0;
    s->coded_height      = 0;
    s->coded_format      = AV_PIX_FMT_YUV422P10;
    s->cropped_height    = 0;
    s->bpc               = 10;
    s->channel_cnt       = 3;
    s->subband_cnt       = SUBBAND_COUNT;
    s->channel_num       = 0;
    s->lowpass_precision = 16;
    s->quantisation      = 1;
    s->codebook          = 0;
    s->difference_coding = 0;
    s->frame_type        = 0;
    s->sample_type       = 0;
    if (s->transform_type != 2)
        s->transform_type = -1;
    init_plane_defaults(s);
    init_peak_table_defaults(s);
}
static inline int dequant_and_decompand(CFHDContext *s, int level, int quantisation, int codebook)
{
    if (codebook == 0 || codebook == 1) {
        return s->lut[codebook][abs(level)] * FFSIGN(level) * quantisation;
    } else
        return level * quantisation;
}

static inline void difference_coding(int16_t *band, int width, int height)
{
    int i, j;

    for (i = 0; i < height; i++) {
        for (j = 1; j < width; j++) {
            band[j] += band[j-1];
        }
        band += width;
    }
}

static inline void peak_table(int16_t *band, Peak *peak, int length)
{
    int i;
    for (i = 0; i < length; i++)
        if (abs(band[i]) > peak->level)
            band[i] = bytestream2_get_le16(&peak->base);
}
static inline void process_alpha(int16_t *alpha, int width)
{
    int i, channel;

    for (i = 0; i < width; i++) {
        channel = alpha[i];
        channel -= ALPHA_COMPAND_DC_OFFSET;
        channel <<= 3;
        channel *= ALPHA_COMPAND_GAIN;
        channel >>= 16;
        channel = av_clip_uintp2(channel, 12);
        alpha[i] = channel;
    }
}
static inline void process_bayer(AVFrame *frame, int bpc)
{
    const int linesize = frame->linesize[0];
    uint16_t *r = (uint16_t *)frame->data[0];
    uint16_t *g1 = (uint16_t *)(frame->data[0] + 2);
    uint16_t *g2 = (uint16_t *)(frame->data[0] + frame->linesize[0]);
    uint16_t *b = (uint16_t *)(frame->data[0] + frame->linesize[0] + 2);
    const int mid = 1 << (bpc - 1);
    const int factor = 1 << (16 - bpc);

    for (int y = 0; y < frame->height >> 1; y++) {
        for (int x = 0; x < frame->width; x += 2) {
            int R, G1, G2, B;
            int g, rg, bg, gd;

            g  = r[x];
            rg = g1[x];
            bg = g2[x];
            gd = b[x];
            gd -= mid;

            R  = (rg - mid) * 2 + g;
            G1 = g + gd;
            G2 = g - gd;
            B  = (bg - mid) * 2 + g;

            R  = av_clip_uintp2(R  * factor, 16);
            G1 = av_clip_uintp2(G1 * factor, 16);
            G2 = av_clip_uintp2(G2 * factor, 16);
            B  = av_clip_uintp2(B  * factor, 16);

            r[x]  = R;
            g1[x] = G1;
            g2[x] = G2;
            b[x]  = B;
        }

        r  += linesize;
        g1 += linesize;
        g2 += linesize;
        b  += linesize;
    }
}
static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
                                              int width, int linesize, int plane)
{
    int i;
    int16_t even, odd;

    for (i = 0; i < width; i++) {
        even = (low[i] - high[i])/2;
        odd  = (low[i] + high[i])/2;

        output[i]            = av_clip_uintp2(even, 10);
        output[i + linesize] = av_clip_uintp2(odd, 10);
    }
}

static inline void inverse_temporal_filter(int16_t *low, int16_t *high, int width)
{
    for (int i = 0; i < width; i++) {
        int even = (low[i] - high[i]) / 2;
        int odd  = (low[i] + high[i]) / 2;

        low[i]  = even;
        high[i] = odd;
    }
}
static void free_buffers(CFHDContext *s)
{
    int i, j;

    for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
        av_freep(&s->plane[i].idwt_buf);
        av_freep(&s->plane[i].idwt_tmp);
        s->plane[i].idwt_size = 0;

        for (j = 0; j < SUBBAND_COUNT_3D; j++)
            s->plane[i].subband[j] = NULL;

        for (j = 0; j < 10; j++)
            s->plane[i].l_h[j] = NULL;
    }
    s->a_height = 0;
    s->a_width  = 0;
}
static int alloc_buffers(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;
    int i, j, ret, planes, bayer = 0;
    int chroma_x_shift, chroma_y_shift;
    unsigned k;

    if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
        return ret;
    avctx->pix_fmt = s->coded_format;

    ff_cfhddsp_init(&s->dsp, s->bpc, avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);

    if ((ret = av_pix_fmt_get_chroma_sub_sample(s->coded_format,
                                                &chroma_x_shift,
                                                &chroma_y_shift)) < 0)
        return ret;
    planes = av_pix_fmt_count_planes(s->coded_format);
    if (s->coded_format == AV_PIX_FMT_BAYER_RGGB16) {
        planes         = 4;
        chroma_x_shift = 1;
        chroma_y_shift = 1;
        bayer          = 1;
    }

    for (i = 0; i < planes; i++) {
        int w8, h8, w4, h4, w2, h2;
        int width  = (i || bayer) ? s->coded_width  >> chroma_x_shift : s->coded_width;
        int height = (i || bayer) ? s->coded_height >> chroma_y_shift : s->coded_height;
        ptrdiff_t stride = (FFALIGN(width / 8, 8) + 64) * 8;

        if (chroma_y_shift && !bayer)
            height = FFALIGN(height / 8, 2) * 8;
        s->plane[i].width  = width;
        s->plane[i].height = height;
        s->plane[i].stride = stride;

        w8 = FFALIGN(s->plane[i].width / 8, 8) + 64;
        h8 = FFALIGN(height, 8) / 8;
        w4 = w8 * 2;
        h4 = h8 * 2;
        w2 = w4 * 2;
        h2 = h4 * 2;

        if (s->transform_type == 0) {
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        } else {
            s->plane[i].idwt_size = FFALIGN(height, 8) * stride * 2;
            s->plane[i].idwt_buf =
                av_mallocz_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_buf));
            s->plane[i].idwt_tmp =
                av_malloc_array(s->plane[i].idwt_size, sizeof(*s->plane[i].idwt_tmp));
        }
        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
            return AVERROR(ENOMEM);

        s->plane[i].subband[0] = s->plane[i].idwt_buf;
        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
        if (s->transform_type == 0) {
            s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
        } else {
            int16_t *frame2 =
            s->plane[i].subband[7]  = s->plane[i].idwt_buf + 4 * w2 * h2;
            s->plane[i].subband[8]  = frame2 + 2 * w4 * h4;
            s->plane[i].subband[9]  = frame2 + 1 * w4 * h4;
            s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
            s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
            s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
            s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
            s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
            s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
            s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
        }

        if (s->transform_type == 0) {
            for (j = 0; j < DWT_LEVELS; j++) {
                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width  = w8 << j;
                    s->plane[i].band[j][k].a_height = h8 << j;
                }
            }
        } else {
            for (j = 0; j < DWT_LEVELS_3D; j++) {
                int t = j < 1 ? 0 : (j < 3 ? 1 : 2);

                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
                    s->plane[i].band[j][k].a_width  = w8 << t;
                    s->plane[i].band[j][k].a_height = h8 << t;
                }
            }
        }

        /* ll2 and ll1 commented out because they are done in-place */
        s->plane[i].l_h[0] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[1] = s->plane[i].idwt_tmp + 2 * w8 * h8;
        // s->plane[i].l_h[2] = ll2;
        s->plane[i].l_h[3] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[4] = s->plane[i].idwt_tmp + 2 * w4 * h4;
        // s->plane[i].l_h[5] = ll1;
        s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
        s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
        if (s->transform_type != 0) {
            int16_t *frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;

            s->plane[i].l_h[8] = frame2;
            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
        }
    }

    s->a_height = s->coded_height;
    s->a_width  = s->coded_width;
    s->a_format = s->coded_format;

    return 0;
}
  315. static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
  316. AVPacket *avpkt)
  317. {
  318. CFHDContext *s = avctx->priv_data;
  319. CFHDDSPContext *dsp = &s->dsp;
  320. GetByteContext gb;
  321. ThreadFrame frame = { .f = data };
  322. AVFrame *pic = data;
  323. int ret = 0, i, j, plane, got_buffer = 0;
  324. int16_t *coeff_data;
  325. init_frame_defaults(s);
  326. s->planes = av_pix_fmt_count_planes(s->coded_format);
  327. bytestream2_init(&gb, avpkt->data, avpkt->size);
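    /* The sample is a sequence of 16-bit big-endian (tag, value) pairs.
     * Judging from the sign handling below, a negative tag appears to mark an
     * optional chunk; its absolute value selects the same field as the
     * positive form. */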
  328. while (bytestream2_get_bytes_left(&gb) >= 4) {
  329. /* Bit weird but implement the tag parsing as the spec says */
  330. uint16_t tagu = bytestream2_get_be16(&gb);
  331. int16_t tag = (int16_t)tagu;
  332. int8_t tag8 = (int8_t)(tagu >> 8);
  333. uint16_t abstag = abs(tag);
  334. int8_t abs_tag8 = abs(tag8);
  335. uint16_t data = bytestream2_get_be16(&gb);
  336. if (abs_tag8 >= 0x60 && abs_tag8 <= 0x6f) {
  337. av_log(avctx, AV_LOG_DEBUG, "large len %x\n", ((tagu & 0xff) << 16) | data);
  338. } else if (tag == SampleFlags) {
  339. av_log(avctx, AV_LOG_DEBUG, "Progressive? %"PRIu16"\n", data);
  340. s->progressive = data & 0x0001;
  341. } else if (tag == FrameType) {
  342. s->frame_type = data;
  343. av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
  344. } else if (abstag == VersionMajor) {
  345. av_log(avctx, AV_LOG_DEBUG, "Version major %"PRIu16"\n", data);
  346. } else if (abstag == VersionMinor) {
  347. av_log(avctx, AV_LOG_DEBUG, "Version minor %"PRIu16"\n", data);
  348. } else if (abstag == VersionRevision) {
  349. av_log(avctx, AV_LOG_DEBUG, "Version revision %"PRIu16"\n", data);
  350. } else if (abstag == VersionEdit) {
  351. av_log(avctx, AV_LOG_DEBUG, "Version edit %"PRIu16"\n", data);
  352. } else if (abstag == Version) {
  353. av_log(avctx, AV_LOG_DEBUG, "Version %"PRIu16"\n", data);
  354. } else if (tag == ImageWidth) {
  355. av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
  356. s->coded_width = data;
  357. } else if (tag == ImageHeight) {
  358. av_log(avctx, AV_LOG_DEBUG, "Height %"PRIu16"\n", data);
  359. s->coded_height = data;
  360. } else if (tag == ChannelCount) {
  361. av_log(avctx, AV_LOG_DEBUG, "Channel Count: %"PRIu16"\n", data);
  362. s->channel_cnt = data;
  363. if (data > 4) {
  364. av_log(avctx, AV_LOG_ERROR, "Channel Count of %"PRIu16" is unsupported\n", data);
  365. ret = AVERROR_PATCHWELCOME;
  366. goto end;
  367. }
  368. } else if (tag == SubbandCount) {
  369. av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
  370. if (data != SUBBAND_COUNT && data != SUBBAND_COUNT_3D) {
  371. av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
  372. ret = AVERROR_PATCHWELCOME;
  373. goto end;
  374. }
  375. } else if (tag == ChannelNumber) {
  376. s->channel_num = data;
  377. av_log(avctx, AV_LOG_DEBUG, "Channel number %"PRIu16"\n", data);
  378. if (s->channel_num >= s->planes) {
  379. av_log(avctx, AV_LOG_ERROR, "Invalid channel number\n");
  380. ret = AVERROR(EINVAL);
  381. goto end;
  382. }
  383. init_plane_defaults(s);
  384. } else if (tag == SubbandNumber) {
  385. if (s->subband_num != 0 && data == 1) // hack
  386. s->level++;
  387. av_log(avctx, AV_LOG_DEBUG, "Subband number %"PRIu16"\n", data);
  388. s->subband_num = data;
  389. if ((s->transform_type == 0 && s->level >= DWT_LEVELS) ||
  390. (s->transform_type == 2 && s->level >= DWT_LEVELS_3D)) {
  391. av_log(avctx, AV_LOG_ERROR, "Invalid level\n");
  392. ret = AVERROR(EINVAL);
  393. goto end;
  394. }
  395. if (s->subband_num > 3) {
  396. av_log(avctx, AV_LOG_ERROR, "Invalid subband number\n");
  397. ret = AVERROR(EINVAL);
  398. goto end;
  399. }
  400. } else if (tag == SubbandBand) {
  401. av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
  402. if ((s->transform_type == 0 && data >= SUBBAND_COUNT) ||
  403. (s->transform_type == 2 && data >= SUBBAND_COUNT_3D && data != 255)) {
  404. av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
  405. ret = AVERROR(EINVAL);
  406. goto end;
  407. }
  408. if (s->transform_type == 0 || s->transform_type == 2)
  409. s->subband_num_actual = data;
  410. else
  411. av_log(avctx, AV_LOG_WARNING, "Ignoring subband num actual %"PRIu16"\n", data);
  412. } else if (tag == LowpassPrecision)
  413. av_log(avctx, AV_LOG_DEBUG, "Lowpass precision bits: %"PRIu16"\n", data);
  414. else if (tag == Quantization) {
  415. s->quantisation = data;
  416. av_log(avctx, AV_LOG_DEBUG, "Quantisation: %"PRIu16"\n", data);
  417. } else if (tag == PrescaleTable) {
  418. for (i = 0; i < 8; i++)
  419. s->prescale_table[i] = (data >> (14 - i * 2)) & 0x3;
  420. av_log(avctx, AV_LOG_DEBUG, "Prescale table: %x\n", data);
  421. } else if (tag == BandEncoding) {
  422. if (!data || data > 5) {
  423. av_log(avctx, AV_LOG_ERROR, "Invalid band encoding\n");
  424. ret = AVERROR(EINVAL);
  425. goto end;
  426. }
  427. s->band_encoding = data;
  428. av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d : %x\n", s->subband_num_actual, data);
  429. } else if (tag == LowpassWidth) {
  430. av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
  431. s->plane[s->channel_num].band[0][0].width = data;
  432. s->plane[s->channel_num].band[0][0].stride = data;
  433. } else if (tag == LowpassHeight) {
  434. av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
  435. s->plane[s->channel_num].band[0][0].height = data;
  436. } else if (tag == SampleType) {
  437. s->sample_type = data;
  438. av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
  439. } else if (tag == TransformType) {
  440. if (data > 2) {
  441. av_log(avctx, AV_LOG_ERROR, "Invalid transform type\n");
  442. ret = AVERROR(EINVAL);
  443. goto end;
  444. } else if (data == 1) {
  445. av_log(avctx, AV_LOG_ERROR, "unsupported transform type\n");
  446. ret = AVERROR_PATCHWELCOME;
  447. goto end;
  448. }
  449. if (s->transform_type == -1) {
  450. s->transform_type = data;
  451. av_log(avctx, AV_LOG_DEBUG, "Transform type %"PRIu16"\n", data);
  452. } else {
  453. av_log(avctx, AV_LOG_DEBUG, "Ignoring additional transform type %"PRIu16"\n", data);
  454. }
  455. } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
  456. if (abstag == 0x4001)
  457. s->peak.level = 0;
  458. av_log(avctx, AV_LOG_DEBUG, "Small chunk length %d %s\n", data * 4, tag < 0 ? "optional" : "required");
  459. bytestream2_skipu(&gb, data * 4);
  460. } else if (tag == FrameIndex) {
  461. av_log(avctx, AV_LOG_DEBUG, "Frame index %"PRIu16"\n", data);
  462. s->frame_index = data;
  463. } else if (tag == SampleIndexTable) {
  464. av_log(avctx, AV_LOG_DEBUG, "Sample index table - skipping %i values\n", data);
  465. if (data > bytestream2_get_bytes_left(&gb) / 4) {
  466. av_log(avctx, AV_LOG_ERROR, "too many values (%d)\n", data);
  467. ret = AVERROR_INVALIDDATA;
  468. goto end;
  469. }
  470. for (i = 0; i < data; i++) {
  471. uint32_t offset = bytestream2_get_be32(&gb);
  472. av_log(avctx, AV_LOG_DEBUG, "Offset = %"PRIu32"\n", offset);
  473. }
  474. } else if (tag == HighpassWidth) {
  475. av_log(avctx, AV_LOG_DEBUG, "Highpass width %i channel %i level %i subband %i\n", data, s->channel_num, s->level, s->subband_num);
  476. if (data < 3) {
  477. av_log(avctx, AV_LOG_ERROR, "Invalid highpass width\n");
  478. ret = AVERROR(EINVAL);
  479. goto end;
  480. }
  481. s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
  482. s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
  483. } else if (tag == HighpassHeight) {
  484. av_log(avctx, AV_LOG_DEBUG, "Highpass height %i\n", data);
  485. if (data < 3) {
  486. av_log(avctx, AV_LOG_ERROR, "Invalid highpass height\n");
  487. ret = AVERROR(EINVAL);
  488. goto end;
  489. }
  490. s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
  491. } else if (tag == BandWidth) {
  492. av_log(avctx, AV_LOG_DEBUG, "Highpass width2 %i\n", data);
  493. if (data < 3) {
  494. av_log(avctx, AV_LOG_ERROR, "Invalid highpass width2\n");
  495. ret = AVERROR(EINVAL);
  496. goto end;
  497. }
  498. s->plane[s->channel_num].band[s->level][s->subband_num].width = data;
  499. s->plane[s->channel_num].band[s->level][s->subband_num].stride = FFALIGN(data, 8);
  500. } else if (tag == BandHeight) {
  501. av_log(avctx, AV_LOG_DEBUG, "Highpass height2 %i\n", data);
  502. if (data < 3) {
  503. av_log(avctx, AV_LOG_ERROR, "Invalid highpass height2\n");
  504. ret = AVERROR(EINVAL);
  505. goto end;
  506. }
  507. s->plane[s->channel_num].band[s->level][s->subband_num].height = data;
  508. } else if (tag == InputFormat) {
  509. av_log(avctx, AV_LOG_DEBUG, "Input format %i\n", data);
  510. if (s->coded_format == AV_PIX_FMT_NONE ||
  511. s->coded_format == AV_PIX_FMT_YUV422P10) {
  512. if (data >= 100 && data <= 105) {
  513. s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
  514. } else if (data >= 122 && data <= 128) {
  515. s->coded_format = AV_PIX_FMT_GBRP12;
  516. } else if (data == 30) {
  517. s->coded_format = AV_PIX_FMT_GBRAP12;
  518. } else {
  519. s->coded_format = AV_PIX_FMT_YUV422P10;
  520. }
  521. s->planes = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 4 : av_pix_fmt_count_planes(s->coded_format);
  522. }
  523. } else if (tag == BandCodingFlags) {
  524. s->codebook = data & 0xf;
  525. s->difference_coding = (data >> 4) & 1;
  526. av_log(avctx, AV_LOG_DEBUG, "Other codebook? %i\n", s->codebook);
  527. } else if (tag == Precision) {
  528. av_log(avctx, AV_LOG_DEBUG, "Precision %i\n", data);
  529. if (!(data == 10 || data == 12)) {
  530. av_log(avctx, AV_LOG_ERROR, "Invalid bits per channel\n");
  531. ret = AVERROR(EINVAL);
  532. goto end;
  533. }
  534. avctx->bits_per_raw_sample = s->bpc = data;
  535. } else if (tag == EncodedFormat) {
  536. av_log(avctx, AV_LOG_DEBUG, "Sample format? %i\n", data);
  537. if (data == 1) {
  538. s->coded_format = AV_PIX_FMT_YUV422P10;
  539. } else if (data == 2) {
  540. s->coded_format = AV_PIX_FMT_BAYER_RGGB16;
  541. } else if (data == 3) {
  542. s->coded_format = AV_PIX_FMT_GBRP12;
  543. } else if (data == 4) {
  544. s->coded_format = AV_PIX_FMT_GBRAP12;
  545. } else {
  546. avpriv_report_missing_feature(avctx, "Sample format of %"PRIu16, data);
  547. ret = AVERROR_PATCHWELCOME;
  548. goto end;
  549. }
  550. s->planes = data == 2 ? 4 : av_pix_fmt_count_planes(s->coded_format);
  551. } else if (tag == -DisplayHeight) {
  552. av_log(avctx, AV_LOG_DEBUG, "Cropped height %"PRIu16"\n", data);
  553. s->cropped_height = data;
  554. } else if (tag == -PeakOffsetLow) {
  555. s->peak.offset &= ~0xffff;
  556. s->peak.offset |= (data & 0xffff);
  557. s->peak.base = gb;
  558. s->peak.level = 0;
  559. } else if (tag == -PeakOffsetHigh) {
  560. s->peak.offset &= 0xffff;
  561. s->peak.offset |= (data & 0xffffU)<<16;
  562. s->peak.base = gb;
  563. s->peak.level = 0;
  564. } else if (tag == -PeakLevel && s->peak.offset) {
  565. s->peak.level = data;
  566. bytestream2_seek(&s->peak.base, s->peak.offset - 4, SEEK_CUR);
  567. } else
  568. av_log(avctx, AV_LOG_DEBUG, "Unknown tag %i data %x\n", tag, data);
  569. if (tag == BitstreamMarker && data == 0xf0f &&
  570. s->coded_format != AV_PIX_FMT_NONE) {
  571. int lowpass_height = s->plane[s->channel_num].band[0][0].height;
  572. int lowpass_width = s->plane[s->channel_num].band[0][0].width;
  573. int factor = s->coded_format == AV_PIX_FMT_BAYER_RGGB16 ? 2 : 1;
  574. if (s->coded_width) {
  575. s->coded_width *= factor;
  576. }
  577. if (s->coded_height) {
  578. s->coded_height *= factor;
  579. }
  580. if (!s->a_width && !s->coded_width) {
  581. s->coded_width = lowpass_width * factor * 8;
  582. }
  583. if (!s->a_height && !s->coded_height) {
  584. s->coded_height = lowpass_height * factor * 8;
  585. }
  586. if (s->a_width && !s->coded_width)
  587. s->coded_width = s->a_width;
  588. if (s->a_height && !s->coded_height)
  589. s->coded_height = s->a_height;
  590. if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
  591. s->a_format != s->coded_format) {
  592. free_buffers(s);
  593. if ((ret = alloc_buffers(avctx)) < 0) {
  594. free_buffers(s);
  595. return ret;
  596. }
  597. }
  598. ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
  599. if (ret < 0)
  600. return ret;
  601. if (s->cropped_height) {
  602. unsigned height = s->cropped_height << (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16);
  603. if (avctx->height < height)
  604. return AVERROR_INVALIDDATA;
  605. avctx->height = height;
  606. }
  607. frame.f->width =
  608. frame.f->height = 0;
  609. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  610. return ret;
  611. s->coded_width = 0;
  612. s->coded_height = 0;
  613. s->coded_format = AV_PIX_FMT_NONE;
  614. got_buffer = 1;
  615. } else if (tag == FrameIndex && data == 1 && s->sample_type == 1 && s->frame_type == 2) {
  616. frame.f->width =
  617. frame.f->height = 0;
  618. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  619. return ret;
  620. s->coded_width = 0;
  621. s->coded_height = 0;
  622. s->coded_format = AV_PIX_FMT_NONE;
  623. got_buffer = 1;
  624. }
  625. if (s->subband_num_actual == 255)
  626. goto finish;
  627. coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];
  628. /* Lowpass coefficients */
  629. if (tag == BitstreamMarker && data == 0xf0f && s->a_width && s->a_height) {
  630. int lowpass_height = s->plane[s->channel_num].band[0][0].height;
  631. int lowpass_width = s->plane[s->channel_num].band[0][0].width;
  632. int lowpass_a_height = s->plane[s->channel_num].band[0][0].a_height;
  633. int lowpass_a_width = s->plane[s->channel_num].band[0][0].a_width;
  634. if (lowpass_width < 3 ||
  635. lowpass_width > lowpass_a_width) {
  636. av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
  637. ret = AVERROR(EINVAL);
  638. goto end;
  639. }
  640. if (lowpass_height < 3 ||
  641. lowpass_height > lowpass_a_height) {
  642. av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
  643. ret = AVERROR(EINVAL);
  644. goto end;
  645. }
  646. if (!got_buffer) {
  647. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  648. ret = AVERROR(EINVAL);
  649. goto end;
  650. }
  651. if (lowpass_height > lowpass_a_height || lowpass_width > lowpass_a_width ||
  652. lowpass_width * lowpass_height * sizeof(int16_t) > bytestream2_get_bytes_left(&gb)) {
  653. av_log(avctx, AV_LOG_ERROR, "Too many lowpass coefficients\n");
  654. ret = AVERROR(EINVAL);
  655. goto end;
  656. }
  657. av_log(avctx, AV_LOG_DEBUG, "Start of lowpass coeffs component %d height:%d, width:%d\n", s->channel_num, lowpass_height, lowpass_width);
  658. for (i = 0; i < lowpass_height; i++) {
  659. for (j = 0; j < lowpass_width; j++)
  660. coeff_data[j] = bytestream2_get_be16u(&gb);
  661. coeff_data += lowpass_width;
  662. }
  663. /* Align to mod-4 position to continue reading tags */
  664. bytestream2_seek(&gb, bytestream2_tell(&gb) & 3, SEEK_CUR);
  665. /* Copy last line of coefficients if odd height */
  666. if (lowpass_height & 1) {
  667. memcpy(&coeff_data[lowpass_height * lowpass_width],
  668. &coeff_data[(lowpass_height - 1) * lowpass_width],
  669. lowpass_width * sizeof(*coeff_data));
  670. }
  671. av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
  672. }
  673. if ((tag == BandHeader || tag == BandSecondPass) && s->subband_num_actual != 255 && s->a_width && s->a_height) {
  674. int highpass_height = s->plane[s->channel_num].band[s->level][s->subband_num].height;
  675. int highpass_width = s->plane[s->channel_num].band[s->level][s->subband_num].width;
  676. int highpass_a_width = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
  677. int highpass_a_height = s->plane[s->channel_num].band[s->level][s->subband_num].a_height;
  678. int highpass_stride = s->plane[s->channel_num].band[s->level][s->subband_num].stride;
  679. int expected;
  680. int a_expected = highpass_a_height * highpass_a_width;
  681. int level, run, coeff;
  682. int count = 0, bytes;
  683. if (!got_buffer) {
  684. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  685. ret = AVERROR(EINVAL);
  686. goto end;
  687. }
  688. if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
  689. av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
  690. ret = AVERROR(EINVAL);
  691. goto end;
  692. }
  693. expected = highpass_height * highpass_stride;
  694. av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);
  695. ret = init_get_bits8(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb));
  696. if (ret < 0)
  697. goto end;
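        /* Highpass band payload: run-length/VLC coded (level, run) pairs,
         * terminated by an escape codeword. Codebook 0 uses the "table 9"
         * RL-VLC, any other codebook the "table 18" RL-VLC; unless band
         * encoding 5 (lossless) was signalled, each level is dequantised and
         * decompanded before being stored. */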
  698. {
  699. OPEN_READER(re, &s->gb);
  700. const int lossless = s->band_encoding == 5;
  701. if (s->codebook == 0 && s->transform_type == 2 && s->subband_num_actual == 7)
  702. s->codebook = 1;
  703. if (!s->codebook) {
  704. while (1) {
  705. UPDATE_CACHE(re, &s->gb);
  706. GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
  707. VLC_BITS, 3, 1);
  708. /* escape */
  709. if (level == 64)
  710. break;
  711. count += run;
  712. if (count > expected)
  713. break;
  714. if (!lossless)
  715. coeff = dequant_and_decompand(s, level, s->quantisation, 0);
  716. else
  717. coeff = level;
  718. if (tag == BandSecondPass) {
  719. const uint16_t q = s->quantisation;
  720. for (i = 0; i < run; i++) {
  721. *coeff_data |= coeff << 8;
  722. *coeff_data++ *= q;
  723. }
  724. } else {
  725. for (i = 0; i < run; i++)
  726. *coeff_data++ = coeff;
  727. }
  728. }
  729. } else {
  730. while (1) {
  731. UPDATE_CACHE(re, &s->gb);
  732. GET_RL_VLC(level, run, re, &s->gb, s->table_18_rl_vlc,
  733. VLC_BITS, 3, 1);
  734. /* escape */
  735. if (level == 255 && run == 2)
  736. break;
  737. count += run;
  738. if (count > expected)
  739. break;
  740. if (!lossless)
  741. coeff = dequant_and_decompand(s, level, s->quantisation, s->codebook);
  742. else
  743. coeff = level;
  744. if (tag == BandSecondPass) {
  745. const uint16_t q = s->quantisation;
  746. for (i = 0; i < run; i++) {
  747. *coeff_data |= coeff << 8;
  748. *coeff_data++ *= q;
  749. }
  750. } else {
  751. for (i = 0; i < run; i++)
  752. *coeff_data++ = coeff;
  753. }
  754. }
  755. }
  756. CLOSE_READER(re, &s->gb);
  757. }
  758. if (count > expected) {
  759. av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
  760. ret = AVERROR(EINVAL);
  761. goto end;
  762. }
  763. if (s->peak.level)
  764. peak_table(coeff_data - count, &s->peak, count);
  765. if (s->difference_coding)
  766. difference_coding(s->plane[s->channel_num].subband[s->subband_num_actual], highpass_width, highpass_height);
  767. bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
  768. if (bytes > bytestream2_get_bytes_left(&gb)) {
  769. av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
  770. ret = AVERROR(EINVAL);
  771. goto end;
  772. } else
  773. bytestream2_seek(&gb, bytes, SEEK_CUR);
  774. av_log(avctx, AV_LOG_DEBUG, "End subband coeffs %i extra %i\n", count, count - expected);
  775. finish:
  776. if (s->subband_num_actual != 255)
  777. s->codebook = 0;
  778. }
  779. }
  780. s->planes = av_pix_fmt_count_planes(avctx->pix_fmt);
  781. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  782. s->progressive = 1;
  783. s->planes = 4;
  784. }
  785. ff_thread_finish_setup(avctx);
  786. if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
  787. s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
  788. av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
  789. ret = AVERROR(EINVAL);
  790. goto end;
  791. }
  792. if (!got_buffer) {
  793. av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
  794. ret = AVERROR(EINVAL);
  795. goto end;
  796. }
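    /* Inverse wavelet transforms. transform_type 0 is the purely spatial
     * three-level transform; transform_type 2 additionally carries a temporal
     * decomposition, so one coded sample yields two output frames (hence the
     * sample_type/frame_index checks below). This reading of the fields is
     * inferred from the code paths rather than from a specification. */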
  797. if (s->transform_type == 0 && s->sample_type != 1) {
  798. for (plane = 0; plane < s->planes && !ret; plane++) {
  799. /* level 1 */
  800. int lowpass_height = s->plane[plane].band[0][0].height;
  801. int output_stride = s->plane[plane].band[0][0].a_width;
  802. int lowpass_width = s->plane[plane].band[0][0].width;
  803. int highpass_stride = s->plane[plane].band[0][1].stride;
  804. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  805. ptrdiff_t dst_linesize;
  806. int16_t *low, *high, *output, *dst;
  807. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  808. act_plane = 0;
  809. dst_linesize = pic->linesize[act_plane];
  810. } else {
  811. dst_linesize = pic->linesize[act_plane] / 2;
  812. }
  813. if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
  814. !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
  815. lowpass_width < 3 || lowpass_height < 3) {
  816. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  817. ret = AVERROR(EINVAL);
  818. goto end;
  819. }
  820. av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  821. low = s->plane[plane].subband[0];
  822. high = s->plane[plane].subband[2];
  823. output = s->plane[plane].l_h[0];
  824. dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
  825. low = s->plane[plane].subband[1];
  826. high = s->plane[plane].subband[3];
  827. output = s->plane[plane].l_h[1];
  828. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  829. low = s->plane[plane].l_h[0];
  830. high = s->plane[plane].l_h[1];
  831. output = s->plane[plane].subband[0];
  832. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  833. if (s->bpc == 12) {
  834. output = s->plane[plane].subband[0];
  835. for (i = 0; i < lowpass_height * 2; i++) {
  836. for (j = 0; j < lowpass_width * 2; j++)
  837. output[j] *= 4;
  838. output += output_stride * 2;
  839. }
  840. }
  841. /* level 2 */
  842. lowpass_height = s->plane[plane].band[1][1].height;
  843. output_stride = s->plane[plane].band[1][1].a_width;
  844. lowpass_width = s->plane[plane].band[1][1].width;
  845. highpass_stride = s->plane[plane].band[1][1].stride;
  846. if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
  847. !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
  848. lowpass_width < 3 || lowpass_height < 3) {
  849. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  850. ret = AVERROR(EINVAL);
  851. goto end;
  852. }
  853. av_log(avctx, AV_LOG_DEBUG, "Level 2 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  854. low = s->plane[plane].subband[0];
  855. high = s->plane[plane].subband[5];
  856. output = s->plane[plane].l_h[3];
  857. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  858. low = s->plane[plane].subband[4];
  859. high = s->plane[plane].subband[6];
  860. output = s->plane[plane].l_h[4];
  861. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  862. low = s->plane[plane].l_h[3];
  863. high = s->plane[plane].l_h[4];
  864. output = s->plane[plane].subband[0];
  865. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  866. output = s->plane[plane].subband[0];
  867. for (i = 0; i < lowpass_height * 2; i++) {
  868. for (j = 0; j < lowpass_width * 2; j++)
  869. output[j] *= 4;
  870. output += output_stride * 2;
  871. }
  872. /* level 3 */
  873. lowpass_height = s->plane[plane].band[2][1].height;
  874. output_stride = s->plane[plane].band[2][1].a_width;
  875. lowpass_width = s->plane[plane].band[2][1].width;
  876. highpass_stride = s->plane[plane].band[2][1].stride;
  877. if (lowpass_height > s->plane[plane].band[2][1].a_height || lowpass_width > s->plane[plane].band[2][1].a_width ||
  878. !highpass_stride || s->plane[plane].band[2][1].width > s->plane[plane].band[2][1].a_width ||
  879. lowpass_height < 3 || lowpass_width < 3 || lowpass_width * 2 > s->plane[plane].width) {
  880. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  881. ret = AVERROR(EINVAL);
  882. goto end;
  883. }
  884. av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  885. if (s->progressive) {
  886. low = s->plane[plane].subband[0];
  887. high = s->plane[plane].subband[8];
  888. output = s->plane[plane].l_h[6];
  889. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  890. low = s->plane[plane].subband[7];
  891. high = s->plane[plane].subband[9];
  892. output = s->plane[plane].l_h[7];
  893. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  894. dst = (int16_t *)pic->data[act_plane];
  895. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  896. if (plane & 1)
  897. dst++;
  898. if (plane > 1)
  899. dst += pic->linesize[act_plane] >> 1;
  900. }
  901. low = s->plane[plane].l_h[6];
  902. high = s->plane[plane].l_h[7];
  903. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  904. (lowpass_height * 2 > avctx->coded_height / 2 ||
  905. lowpass_width * 2 > avctx->coded_width / 2 )
  906. ) {
  907. ret = AVERROR_INVALIDDATA;
  908. goto end;
  909. }
  910. for (i = 0; i < s->plane[act_plane].height; i++) {
  911. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  912. if (avctx->pix_fmt == AV_PIX_FMT_GBRAP12 && act_plane == 3)
  913. process_alpha(dst, lowpass_width * 2);
  914. low += output_stride;
  915. high += output_stride;
  916. dst += dst_linesize;
  917. }
  918. } else {
  919. av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
  920. pic->interlaced_frame = 1;
  921. low = s->plane[plane].subband[0];
  922. high = s->plane[plane].subband[7];
  923. output = s->plane[plane].l_h[6];
  924. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  925. low = s->plane[plane].subband[8];
  926. high = s->plane[plane].subband[9];
  927. output = s->plane[plane].l_h[7];
  928. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  929. dst = (int16_t *)pic->data[act_plane];
  930. low = s->plane[plane].l_h[6];
  931. high = s->plane[plane].l_h[7];
  932. for (i = 0; i < s->plane[act_plane].height / 2; i++) {
  933. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  934. low += output_stride * 2;
  935. high += output_stride * 2;
  936. dst += pic->linesize[act_plane];
  937. }
  938. }
  939. }
  940. } else if (s->transform_type == 2 && (avctx->internal->is_copy || s->frame_index == 1 || s->sample_type != 1)) {
  941. for (plane = 0; plane < s->planes && !ret; plane++) {
  942. int lowpass_height = s->plane[plane].band[0][0].height;
  943. int output_stride = s->plane[plane].band[0][0].a_width;
  944. int lowpass_width = s->plane[plane].band[0][0].width;
  945. int highpass_stride = s->plane[plane].band[0][1].stride;
  946. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  947. int16_t *low, *high, *output, *dst;
  948. ptrdiff_t dst_linesize;
  949. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  950. act_plane = 0;
  951. dst_linesize = pic->linesize[act_plane];
  952. } else {
  953. dst_linesize = pic->linesize[act_plane] / 2;
  954. }
  955. if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
  956. !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width ||
  957. lowpass_width < 3 || lowpass_height < 3) {
  958. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  959. ret = AVERROR(EINVAL);
  960. goto end;
  961. }
  962. av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  963. low = s->plane[plane].subband[0];
  964. high = s->plane[plane].subband[2];
  965. output = s->plane[plane].l_h[0];
  966. dsp->vert_filter(output, output_stride, low, lowpass_width, high, highpass_stride, lowpass_width, lowpass_height);
  967. low = s->plane[plane].subband[1];
  968. high = s->plane[plane].subband[3];
  969. output = s->plane[plane].l_h[1];
  970. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  971. low = s->plane[plane].l_h[0];
  972. high = s->plane[plane].l_h[1];
  973. output = s->plane[plane].l_h[7];
  974. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  975. if (s->bpc == 12) {
  976. output = s->plane[plane].l_h[7];
  977. for (i = 0; i < lowpass_height * 2; i++) {
  978. for (j = 0; j < lowpass_width * 2; j++)
  979. output[j] *= 4;
  980. output += output_stride * 2;
  981. }
  982. }
  983. lowpass_height = s->plane[plane].band[1][1].height;
  984. output_stride = s->plane[plane].band[1][1].a_width;
  985. lowpass_width = s->plane[plane].band[1][1].width;
  986. highpass_stride = s->plane[plane].band[1][1].stride;
  987. if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
  988. !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width ||
  989. lowpass_width < 3 || lowpass_height < 3) {
  990. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  991. ret = AVERROR(EINVAL);
  992. goto end;
  993. }
  994. av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  995. low = s->plane[plane].l_h[7];
  996. high = s->plane[plane].subband[5];
  997. output = s->plane[plane].l_h[3];
  998. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  999. low = s->plane[plane].subband[4];
  1000. high = s->plane[plane].subband[6];
  1001. output = s->plane[plane].l_h[4];
  1002. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1003. low = s->plane[plane].l_h[3];
  1004. high = s->plane[plane].l_h[4];
  1005. output = s->plane[plane].l_h[7];
  1006. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  1007. output = s->plane[plane].l_h[7];
  1008. for (i = 0; i < lowpass_height * 2; i++) {
  1009. for (j = 0; j < lowpass_width * 2; j++)
  1010. output[j] *= 4;
  1011. output += output_stride * 2;
  1012. }
  1013. low = s->plane[plane].subband[7];
  1014. high = s->plane[plane].subband[9];
  1015. output = s->plane[plane].l_h[3];
  1016. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1017. low = s->plane[plane].subband[8];
  1018. high = s->plane[plane].subband[10];
  1019. output = s->plane[plane].l_h[4];
  1020. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1021. low = s->plane[plane].l_h[3];
  1022. high = s->plane[plane].l_h[4];
  1023. output = s->plane[plane].l_h[9];
  1024. dsp->horiz_filter(output, output_stride, low, output_stride, high, output_stride, lowpass_width, lowpass_height * 2);
  1025. lowpass_height = s->plane[plane].band[4][1].height;
  1026. output_stride = s->plane[plane].band[4][1].a_width;
  1027. lowpass_width = s->plane[plane].band[4][1].width;
  1028. highpass_stride = s->plane[plane].band[4][1].stride;
  1029. av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
  1030. if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
  1031. !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
  1032. lowpass_width < 3 || lowpass_height < 3) {
  1033. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1034. ret = AVERROR(EINVAL);
  1035. goto end;
  1036. }
  1037. low = s->plane[plane].l_h[7];
  1038. high = s->plane[plane].l_h[9];
  1039. output = s->plane[plane].l_h[7];
  1040. for (i = 0; i < lowpass_height; i++) {
  1041. inverse_temporal_filter(low, high, lowpass_width);
  1042. low += output_stride;
  1043. high += output_stride;
  1044. }
  1045. if (s->progressive) {
  1046. low = s->plane[plane].l_h[7];
  1047. high = s->plane[plane].subband[15];
  1048. output = s->plane[plane].l_h[6];
  1049. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1050. low = s->plane[plane].subband[14];
  1051. high = s->plane[plane].subband[16];
  1052. output = s->plane[plane].l_h[7];
  1053. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1054. low = s->plane[plane].l_h[9];
  1055. high = s->plane[plane].subband[12];
  1056. output = s->plane[plane].l_h[8];
  1057. dsp->vert_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1058. low = s->plane[plane].subband[11];
  1059. high = s->plane[plane].subband[13];
  1060. output = s->plane[plane].l_h[9];
  1061. dsp->vert_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1062. if (s->sample_type == 1)
  1063. continue;
  1064. dst = (int16_t *)pic->data[act_plane];
  1065. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1066. if (plane & 1)
  1067. dst++;
  1068. if (plane > 1)
  1069. dst += pic->linesize[act_plane] >> 1;
  1070. }
  1071. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  1072. (lowpass_height * 2 > avctx->coded_height / 2 ||
  1073. lowpass_width * 2 > avctx->coded_width / 2 )
  1074. ) {
  1075. ret = AVERROR_INVALIDDATA;
  1076. goto end;
  1077. }
  1078. low = s->plane[plane].l_h[6];
  1079. high = s->plane[plane].l_h[7];
  1080. for (i = 0; i < s->plane[act_plane].height; i++) {
  1081. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  1082. low += output_stride;
  1083. high += output_stride;
  1084. dst += dst_linesize;
  1085. }
  1086. } else {
  1087. pic->interlaced_frame = 1;
  1088. low = s->plane[plane].l_h[7];
  1089. high = s->plane[plane].subband[14];
  1090. output = s->plane[plane].l_h[6];
  1091. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1092. low = s->plane[plane].subband[15];
  1093. high = s->plane[plane].subband[16];
  1094. output = s->plane[plane].l_h[7];
  1095. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1096. low = s->plane[plane].l_h[9];
  1097. high = s->plane[plane].subband[11];
  1098. output = s->plane[plane].l_h[8];
  1099. dsp->horiz_filter(output, output_stride, low, output_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1100. low = s->plane[plane].subband[12];
  1101. high = s->plane[plane].subband[13];
  1102. output = s->plane[plane].l_h[9];
  1103. dsp->horiz_filter(output, output_stride, low, highpass_stride, high, highpass_stride, lowpass_width, lowpass_height);
  1104. if (s->sample_type == 1)
  1105. continue;
  1106. dst = (int16_t *)pic->data[act_plane];
  1107. low = s->plane[plane].l_h[6];
  1108. high = s->plane[plane].l_h[7];
  1109. for (i = 0; i < s->plane[act_plane].height / 2; i++) {
  1110. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  1111. low += output_stride * 2;
  1112. high += output_stride * 2;
  1113. dst += pic->linesize[act_plane];
  1114. }
  1115. }
  1116. }
  1117. }
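    /* For the temporal (type-2) transform, sample_type 1 appears to select the
     * second frame of a temporal pair: only the final reconstruction pass is
     * run here, reusing the l_h[8]/l_h[9] planes computed above. */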
  1118. if (s->transform_type == 2 && s->sample_type == 1) {
  1119. int16_t *low, *high, *dst;
  1120. int output_stride, lowpass_height, lowpass_width;
  1121. ptrdiff_t dst_linesize;
  1122. for (plane = 0; plane < s->planes; plane++) {
  1123. int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
  1124. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1125. act_plane = 0;
  1126. dst_linesize = pic->linesize[act_plane];
  1127. } else {
  1128. dst_linesize = pic->linesize[act_plane] / 2;
  1129. }
  1130. lowpass_height = s->plane[plane].band[4][1].height;
  1131. output_stride = s->plane[plane].band[4][1].a_width;
  1132. lowpass_width = s->plane[plane].band[4][1].width;
  1133. if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
  1134. s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width ||
  1135. lowpass_width < 3 || lowpass_height < 3) {
  1136. av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
  1137. ret = AVERROR(EINVAL);
  1138. goto end;
  1139. }
  1140. if (s->progressive) {
  1141. dst = (int16_t *)pic->data[act_plane];
  1142. low = s->plane[plane].l_h[8];
  1143. high = s->plane[plane].l_h[9];
  1144. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16) {
  1145. if (plane & 1)
  1146. dst++;
  1147. if (plane > 1)
  1148. dst += pic->linesize[act_plane] >> 1;
  1149. }
  1150. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16 &&
  1151. (lowpass_height * 2 > avctx->coded_height / 2 ||
  1152. lowpass_width * 2 > avctx->coded_width / 2 )
  1153. ) {
  1154. ret = AVERROR_INVALIDDATA;
  1155. goto end;
  1156. }
  1157. for (i = 0; i < s->plane[act_plane].height; i++) {
  1158. dsp->horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
  1159. low += output_stride;
  1160. high += output_stride;
  1161. dst += dst_linesize;
  1162. }
  1163. } else {
  1164. dst = (int16_t *)pic->data[act_plane];
  1165. low = s->plane[plane].l_h[8];
  1166. high = s->plane[plane].l_h[9];
  1167. for (i = 0; i < s->plane[act_plane].height / 2; i++) {
  1168. interlaced_vertical_filter(dst, low, high, lowpass_width * 2, pic->linesize[act_plane]/2, act_plane);
  1169. low += output_stride * 2;
  1170. high += output_stride * 2;
  1171. dst += pic->linesize[act_plane];
  1172. }
  1173. }
  1174. }
  1175. }
  1176. if (avctx->pix_fmt == AV_PIX_FMT_BAYER_RGGB16)
  1177. process_bayer(pic, s->bpc);
  1178. end:
  1179. if (ret < 0)
  1180. return ret;
  1181. *got_frame = 1;
  1182. return avpkt->size;
  1183. }
static av_cold int cfhd_close(AVCodecContext *avctx)
{
    CFHDContext *s = avctx->priv_data;

    free_buffers(s);

    ff_free_vlc(&s->vlc_9);
    ff_free_vlc(&s->vlc_18);

    return 0;
}

#if HAVE_THREADS
static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
{
    CFHDContext *psrc = src->priv_data;
    CFHDContext *pdst = dst->priv_data;
    int ret;

    if (dst == src || psrc->transform_type == 0)
        return 0;

    pdst->a_format       = psrc->a_format;
    pdst->a_width        = psrc->a_width;
    pdst->a_height       = psrc->a_height;
    pdst->transform_type = psrc->transform_type;
    pdst->progressive    = psrc->progressive;
    pdst->planes         = psrc->planes;

    if (!pdst->plane[0].idwt_buf) {
        pdst->coded_width  = pdst->a_width;
        pdst->coded_height = pdst->a_height;
        pdst->coded_format = pdst->a_format;
        ret = alloc_buffers(dst);
        if (ret < 0)
            return ret;
    }

    for (int plane = 0; plane < pdst->planes; plane++) {
        memcpy(pdst->plane[plane].band, psrc->plane[plane].band, sizeof(pdst->plane[plane].band));
        memcpy(pdst->plane[plane].idwt_buf, psrc->plane[plane].idwt_buf,
               pdst->plane[plane].idwt_size * sizeof(int16_t));
    }

    return 0;
}
#endif

AVCodec ff_cfhd_decoder = {
    .name                  = "cfhd",
    .long_name             = NULL_IF_CONFIG_SMALL("GoPro CineForm HD"),
    .type                  = AVMEDIA_TYPE_VIDEO,
    .id                    = AV_CODEC_ID_CFHD,
    .priv_data_size        = sizeof(CFHDContext),
    .init                  = cfhd_init,
    .close                 = cfhd_close,
    .decode                = cfhd_decode,
    .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
    .capabilities          = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
    .caps_internal         = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
};