/*
 * huffyuv decoder
 *
 * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
 *
 * see http://www.pcisys.net/~melanson/codecs/huffyuv.txt for a description of
 * the algorithm used
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
 */

/**
 * @file
 * huffyuv decoder
 */

#define UNCHECKED_BITSTREAM_READER 1

#include "avcodec.h"
#include "get_bits.h"
#include "huffyuv.h"
#include "huffyuvdsp.h"
#include "thread.h"
#include "libavutil/imgutils.h"
#include "libavutil/pixdesc.h"

#define classic_shift_luma_table_size 42
static const unsigned char classic_shift_luma[classic_shift_luma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
    34, 36, 35, 69, 135, 232, 9, 16, 10, 24, 11, 23, 12, 16, 13, 10,
    14, 8, 15, 8, 16, 8, 17, 20, 16, 10, 207, 206, 205, 236, 11, 8,
    10, 21, 9, 23, 8, 8, 199, 70, 69, 68, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
};

#define classic_shift_chroma_table_size 59
static const unsigned char classic_shift_chroma[classic_shift_chroma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
    66, 36, 37, 38, 39, 40, 41, 75, 76, 77, 110, 239, 144, 81, 82, 83,
    84, 85, 118, 183, 56, 57, 88, 89, 56, 89, 154, 57, 58, 57, 26, 141,
    57, 56, 58, 57, 58, 57, 184, 119, 214, 245, 116, 83, 82, 49, 80, 79,
    78, 77, 44, 75, 41, 40, 39, 38, 37, 36, 34, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
};

static const unsigned char classic_add_luma[256] = {
     3,  9,  5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
    73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
    68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
    35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
    37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
    35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
    27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
    15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
    12, 17, 19, 13,  4,  9,  2, 11,  1,  7,  8,  0, 16,  3, 14,  6,
    12, 10,  5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
    18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
    28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
    28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
    62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
    54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
    46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13,  7,  8,
};

static const unsigned char classic_add_chroma[256] = {
      3,   1,   2,   2,   2,   2,   3,   3,   7,   5,   7,   5,   8,   6,  11,   9,
      7,  13,  11,  10,   9,   8,   7,   5,   9,   7,   6,   4,   7,   5,   8,   7,
     11,   8,  13,  11,  19,  15,  22,  23,  20,  33,  32,  28,  27,  29,  51,  77,
     43,  45,  76,  81,  46,  82,  75,  55,  56, 144,  58,  80,  60,  74, 147,  63,
    143,  65,  66,  67,  68,  69,  70,  71,  72,  73,  74,  75,  76,  77,  78,  79,
     80,  81,  82,  83,  84,  85,  86,  87,  88,  89,  90,  91,  27,  30,  21,  22,
     17,  14,   5,   6, 100,  54,  47,  50,  51,  53, 106, 107, 108, 109, 110, 111,
    112, 113, 114, 115,   4, 117, 118,  92,  94, 121, 122,   3, 124, 103,   2,   1,
      0, 129, 130, 131, 120, 119, 126, 125, 136, 137, 138, 139, 140, 141, 142, 134,
    135, 132, 133, 104,  64, 101,  62,  57, 102,  95,  93,  59,  61,  28,  97,  96,
     52,  49,  48,  29,  32,  25,  24,  46,  23,  98,  45,  44,  43,  20,  42,  41,
     19,  18,  99,  40,  15,  39,  38,  16,  13,  12,  11,  37,  10,   9,   8,  36,
      7, 128, 127, 105, 123, 116,  35,  34,  33, 145,  31,  79,  42, 146,  78,  26,
     83,  48,  49,  50,  44,  47,  26,  31,  30,  18,  17,  19,  21,  24,  25,  13,
     14,  16,  17,  18,  20,  21,  12,  14,  15,   9,  10,   6,   9,   6,   5,   8,
      6,  12,   8,  10,   7,   9,   6,   4,   6,   2,   2,   3,   3,   3,   3,   2,
};
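
/*
 * Read a run-length coded table of Huffman code lengths: each entry is a
 * 3-bit repeat count followed by a 5-bit length value; a repeat count of 0
 * means an explicit 8-bit count follows instead.
 */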
static int read_len_table(uint8_t *dst, GetBitContext *gb, int n)
{
    int i, val, repeat;

    for (i = 0; i < n;) {
        repeat = get_bits(gb, 3);
        val    = get_bits(gb, 5);
        if (repeat == 0)
            repeat = get_bits(gb, 8);
        if (i + repeat > n || get_bits_left(gb) < 0) {
            av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
            return AVERROR_INVALIDDATA;
        }
        while (repeat--)
            dst[i++] = val;
    }
    return 0;
}
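
/*
 * Build the joint VLC tables in s->vlc[4..7], which decode two symbols per
 * lookup: pairs of plane samples for YUV/gray data, or (G, B, R) triples
 * packed through pix_bgr_map for RGB data.
 */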
static int generate_joint_tables(HYuvContext *s)
{
    int ret;
    uint16_t *symbols = av_mallocz(5 << VLC_BITS);
    uint16_t *bits;
    uint8_t *len;

    if (!symbols)
        return AVERROR(ENOMEM);
    bits = symbols + (1 << VLC_BITS);
    len  = (uint8_t *)(bits + (1 << VLC_BITS));

    if (s->bitstream_bpp < 24 || s->version > 2) {
        int p, i, y, u;
        for (p = 0; p < 4; p++) {
            int p0 = s->version > 2 ? p : 0;
            for (i = y = 0; y < s->vlc_n; y++) {
                int len0  = s->len[p0][y];
                int limit = VLC_BITS - len0;
                if (limit <= 0 || !len0)
                    continue;
                if ((sign_extend(y, 8) & (s->vlc_n - 1)) != y)
                    continue;
                for (u = 0; u < s->vlc_n; u++) {
                    int len1 = s->len[p][u];
                    if (len1 > limit || !len1)
                        continue;
                    if ((sign_extend(u, 8) & (s->vlc_n - 1)) != u)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]     = len0 + len1;
                    bits[i]    = (s->bits[p0][y] << len1) + s->bits[p][u];
                    symbols[i] = (y << 8) + (u & 0xFF);
                    i++;
                }
            }
            ff_free_vlc(&s->vlc[4 + p]);
            if ((ret = ff_init_vlc_sparse(&s->vlc[4 + p], VLC_BITS, i, len, 1, 1,
                                          bits, 2, 2, symbols, 2, 2, 0)) < 0)
                goto out;
        }
    } else {
        uint8_t (*map)[4] = (uint8_t(*)[4]) s->pix_bgr_map;
        int i, b, g, r, code;
        int p0 = s->decorrelate;
        int p1 = !s->decorrelate;
        /* Restrict the range to +/-16 because that's pretty much guaranteed
         * to cover all the combinations that fit in 11 bits total, and it
         * does not matter if we miss a few rare codes. */
        for (i = 0, g = -16; g < 16; g++) {
            int len0   = s->len[p0][g & 255];
            int limit0 = VLC_BITS - len0;
            if (limit0 < 2 || !len0)
                continue;
            for (b = -16; b < 16; b++) {
                int len1   = s->len[p1][b & 255];
                int limit1 = limit0 - len1;
                if (limit1 < 1 || !len1)
                    continue;
                code = (s->bits[p0][g & 255] << len1) + s->bits[p1][b & 255];
                for (r = -16; r < 16; r++) {
                    int len2 = s->len[2][r & 255];
                    if (len2 > limit1 || !len2)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]  = len0 + len1 + len2;
                    bits[i] = (code << len2) + s->bits[2][r & 255];
                    if (s->decorrelate) {
                        map[i][G] = g;
                        map[i][B] = g + b;
                        map[i][R] = g + r;
                    } else {
                        map[i][B] = g;
                        map[i][G] = b;
                        map[i][R] = r;
                    }
                    i++;
                }
            }
        }
        ff_free_vlc(&s->vlc[4]);
        if ((ret = init_vlc(&s->vlc[4], VLC_BITS, i, len, 1, 1,
                            bits, 2, 2, 0)) < 0)
            goto out;
    }
    ret = 0;
out:
    av_freep(&symbols);
    return ret;
}
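
/*
 * Parse the per-plane Huffman code-length tables from the bitstream, derive
 * the code bits from the lengths and (re)build the individual and joint VLC
 * tables. Returns the number of bytes consumed on success.
 */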
static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length)
{
    GetBitContext gb;
    int i, ret;
    int count = 3;

    if ((ret = init_get_bits(&gb, src, length * 8)) < 0)
        return ret;

    if (s->version > 2)
        count = 1 + s->alpha + 2 * s->chroma;

    for (i = 0; i < count; i++) {
        if ((ret = read_len_table(s->len[i], &gb, s->vlc_n)) < 0)
            return ret;
        if ((ret = ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n)) < 0)
            return ret;
        ff_free_vlc(&s->vlc[i]);
        if ((ret = init_vlc(&s->vlc[i], VLC_BITS, s->vlc_n, s->len[i], 1, 1,
                            s->bits[i], 4, 4, 0)) < 0)
            return ret;
    }

    if ((ret = generate_joint_tables(s)) < 0)
        return ret;

    return (get_bits_count(&gb) + 7) / 8;
}
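
/*
 * Load the hardcoded length/code tables used by "classic" huffyuv streams
 * (version 0/1), which do not carry Huffman tables in the extradata.
 */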
static int read_old_huffman_tables(HYuvContext *s)
{
    GetBitContext gb;
    int i, ret;

    init_get_bits(&gb, classic_shift_luma,
                  classic_shift_luma_table_size * 8);
    if ((ret = read_len_table(s->len[0], &gb, 256)) < 0)
        return ret;

    init_get_bits(&gb, classic_shift_chroma,
                  classic_shift_chroma_table_size * 8);
    if ((ret = read_len_table(s->len[1], &gb, 256)) < 0)
        return ret;

    for (i = 0; i < 256; i++)
        s->bits[0][i] = classic_add_luma[i];
    for (i = 0; i < 256; i++)
        s->bits[1][i] = classic_add_chroma[i];

    if (s->bitstream_bpp >= 24) {
        memcpy(s->bits[1], s->bits[0], 256 * sizeof(uint32_t));
        memcpy(s->len[1], s->len[0], 256 * sizeof(uint8_t));
    }
    memcpy(s->bits[2], s->bits[1], 256 * sizeof(uint32_t));
    memcpy(s->len[2], s->len[1], 256 * sizeof(uint8_t));

    for (i = 0; i < 4; i++) {
        ff_free_vlc(&s->vlc[i]);
        if ((ret = init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
                            s->bits[i], 4, 4, 0)) < 0)
            return ret;
    }

    if ((ret = generate_joint_tables(s)) < 0)
        return ret;

    return 0;
}

static av_cold int decode_end(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int i;

    ff_huffyuv_common_end(s);
    av_freep(&s->bitstream_buffer);

    for (i = 0; i < 8; i++)
        ff_free_vlc(&s->vlc[i]);

    return 0;
}
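
/*
 * Parse the extradata (stream version, predictor, bit depth, chroma
 * subsampling, interlacing and context flags), pick the output pixel format
 * and set up the Huffman tables and temporary buffers.
 */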
static av_cold int decode_init(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int ret;

    ret = av_image_check_size(avctx->width, avctx->height, 0, avctx);
    if (ret < 0)
        return ret;

    ff_huffyuvdsp_init(&s->hdsp);
    memset(s->vlc, 0, 4 * sizeof(VLC));

    s->interlaced = avctx->height > 288;
    s->bgr32      = 1;

    if (avctx->extradata_size) {
        if ((avctx->bits_per_coded_sample & 7) &&
            avctx->bits_per_coded_sample != 12)
            s->version = 1; // do such files exist at all?
        else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
            s->version = 2;
        else
            s->version = 3;
    } else
        s->version = 0;

    s->bps    = 8;
    s->n      = 1 << s->bps;
    s->vlc_n  = FFMIN(s->n, MAX_VLC_N);
    s->chroma = 1;

    if (s->version >= 2) {
        int method, interlace;

        if (avctx->extradata_size < 4)
            return AVERROR_INVALIDDATA;

        method         = avctx->extradata[0];
        s->decorrelate = method & 64 ? 1 : 0;
        s->predictor   = method & 63;
        if (s->version == 2) {
            s->bitstream_bpp = avctx->extradata[1];
            if (s->bitstream_bpp == 0)
                s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
        } else {
            s->bps            = (avctx->extradata[1] >> 4) + 1;
            s->n              = 1 << s->bps;
            s->vlc_n          = FFMIN(s->n, MAX_VLC_N);
            s->chroma_h_shift = avctx->extradata[1] & 3;
            s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
            s->yuv    = !!(avctx->extradata[2] & 1);
            s->chroma = !!(avctx->extradata[2] & 3);
            s->alpha  = !!(avctx->extradata[2] & 4);
        }
        interlace     = (avctx->extradata[2] & 0x30) >> 4;
        s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
        s->context    = avctx->extradata[2] & 0x40 ? 1 : 0;

        if ((ret = read_huffman_tables(s, avctx->extradata + 4,
                                       avctx->extradata_size - 4)) < 0)
            goto error;
    } else {
        switch (avctx->bits_per_coded_sample & 7) {
        case 1:
            s->predictor   = LEFT;
            s->decorrelate = 0;
            break;
        case 2:
            s->predictor   = LEFT;
            s->decorrelate = 1;
            break;
        case 3:
            s->predictor   = PLANE;
            s->decorrelate = avctx->bits_per_coded_sample >= 24;
            break;
        case 4:
            s->predictor   = MEDIAN;
            s->decorrelate = 0;
            break;
        default:
            s->predictor   = LEFT; // OLD
            s->decorrelate = 0;
            break;
        }
        s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
        s->context       = 0;

        if ((ret = read_old_huffman_tables(s)) < 0)
            goto error;
    }

    if (s->version <= 2) {
        switch (s->bitstream_bpp) {
        case 12:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P;
            s->yuv         = 1;
            break;
        case 16:
            if (s->yuy2)
                avctx->pix_fmt = AV_PIX_FMT_YUYV422;
            else
                avctx->pix_fmt = AV_PIX_FMT_YUV422P;
            s->yuv = 1;
            break;
        case 24:
            if (s->bgr32)
                avctx->pix_fmt = AV_PIX_FMT_0RGB32;
            else
                avctx->pix_fmt = AV_PIX_FMT_BGR24;
            break;
        case 32:
            av_assert0(s->bgr32);
            avctx->pix_fmt = AV_PIX_FMT_RGB32;
            s->alpha       = 1;
            break;
        default:
            ret = AVERROR_INVALIDDATA;
            goto error;
        }
        av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
                                         &s->chroma_h_shift,
                                         &s->chroma_v_shift);
    } else {
        switch ((s->chroma << 10) | (s->yuv << 9) | (s->alpha << 8) |
                ((s->bps - 1) << 4) | s->chroma_h_shift | (s->chroma_v_shift << 2)) {
        case 0x070:
            avctx->pix_fmt = AV_PIX_FMT_GRAY8;
            break;
        case 0x0F0:
            avctx->pix_fmt = AV_PIX_FMT_GRAY16;
            break;
        case 0x170:
            avctx->pix_fmt = AV_PIX_FMT_GRAY8A;
            break;
        case 0x470:
            avctx->pix_fmt = AV_PIX_FMT_GBRP;
            break;
        case 0x480:
            avctx->pix_fmt = AV_PIX_FMT_GBRP9;
            break;
        case 0x490:
            avctx->pix_fmt = AV_PIX_FMT_GBRP10;
            break;
        case 0x4B0:
            avctx->pix_fmt = AV_PIX_FMT_GBRP12;
            break;
        case 0x4D0:
            avctx->pix_fmt = AV_PIX_FMT_GBRP14;
            break;
        case 0x4F0:
            avctx->pix_fmt = AV_PIX_FMT_GBRP16;
            break;
        case 0x570:
            avctx->pix_fmt = AV_PIX_FMT_GBRAP;
            break;
        case 0x670:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P;
            break;
        case 0x680:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
            break;
        case 0x690:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
            break;
        case 0x6B0:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
            break;
        case 0x6D0:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
            break;
        case 0x6F0:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P16;
            break;
        case 0x671:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P;
            break;
        case 0x681:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
            break;
        case 0x691:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
            break;
        case 0x6B1:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
            break;
        case 0x6D1:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
            break;
        case 0x6F1:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P16;
            break;
        case 0x672:
            avctx->pix_fmt = AV_PIX_FMT_YUV411P;
            break;
        case 0x674:
            avctx->pix_fmt = AV_PIX_FMT_YUV440P;
            break;
        case 0x675:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P;
            break;
        case 0x685:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
            break;
        case 0x695:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
            break;
        case 0x6B5:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
            break;
        case 0x6D5:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
            break;
        case 0x6F5:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
            break;
        case 0x67A:
            avctx->pix_fmt = AV_PIX_FMT_YUV410P;
            break;
        case 0x770:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
            break;
        case 0x780:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P9;
            break;
        case 0x790:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P10;
            break;
        case 0x7F0:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P16;
            break;
        case 0x771:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
            break;
        case 0x781:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P9;
            break;
        case 0x791:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P10;
            break;
        case 0x7F1:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P16;
            break;
        case 0x775:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P;
            break;
        case 0x785:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P9;
            break;
        case 0x795:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P10;
            break;
        case 0x7F5:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P16;
            break;
        default:
            ret = AVERROR_INVALIDDATA;
            goto error;
        }
    }

    ff_huffyuv_common_init(avctx);

    if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
        av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
        ret = AVERROR_INVALIDDATA;
        goto error;
    }
    if (s->predictor == MEDIAN && avctx->pix_fmt == AV_PIX_FMT_YUV422P &&
        avctx->width % 4) {
        av_log(avctx, AV_LOG_ERROR, "width must be a multiple of 4 "
               "for this combination of colorspace and predictor type.\n");
        ret = AVERROR_INVALIDDATA;
        goto error;
    }

    if ((ret = ff_huffyuv_alloc_temp(s)) < 0) {
        ff_huffyuv_common_end(s);
        goto error;
    }

    return 0;
error:
    decode_end(avctx);
    return ret;
}

static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int i, ret;

    if ((ret = ff_huffyuv_alloc_temp(s)) < 0) {
        ff_huffyuv_common_end(s);
        return ret;
    }

    for (i = 0; i < 8; i++)
        s->vlc[i].table = NULL;

    if (s->version >= 2) {
        if ((ret = read_huffman_tables(s, avctx->extradata + 4,
                                       avctx->extradata_size)) < 0)
            return ret;
    } else {
        if ((ret = read_old_huffman_tables(s)) < 0)
            return ret;
    }

    return 0;
}

/** Subset of GET_VLC for use in hand-rolled VLC code */
#define VLC_INTERN(dst, table, gb, name, bits, max_depth)   \
    code = table[index][0];                                 \
    n    = table[index][1];                                 \
    if (max_depth > 1 && n < 0) {                           \
        LAST_SKIP_BITS(name, gb, bits);                     \
        UPDATE_CACHE(name, gb);                             \
                                                            \
        nb_bits = -n;                                       \
        index   = SHOW_UBITS(name, gb, nb_bits) + code;     \
        code    = table[index][0];                          \
        n       = table[index][1];                          \
        if (max_depth > 2 && n < 0) {                       \
            LAST_SKIP_BITS(name, gb, nb_bits);              \
            UPDATE_CACHE(name, gb);                         \
                                                            \
            nb_bits = -n;                                   \
            index   = SHOW_UBITS(name, gb, nb_bits) + code; \
            code    = table[index][0];                      \
            n       = table[index][1];                      \
        }                                                   \
    }                                                       \
    dst = code;                                             \
    LAST_SKIP_BITS(name, gb, n)
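
/*
 * Decode two symbols at once: try the joint table (dtable) first; if the
 * code is not found in its first level, fall back to two separate lookups
 * in table1 and table2.
 */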
#define GET_VLC_DUAL(dst0, dst1, name, gb, dtable, table1, table2,  \
                     bits, max_depth, OP)                           \
    do {                                                            \
        unsigned int index = SHOW_UBITS(name, gb, bits);            \
        int          code, n = dtable[index][1];                    \
                                                                    \
        if (n <= 0) {                                               \
            int nb_bits;                                            \
            VLC_INTERN(dst0, table1, gb, name, bits, max_depth);    \
                                                                    \
            UPDATE_CACHE(re, gb);                                   \
            index = SHOW_UBITS(name, gb, bits);                     \
            VLC_INTERN(dst1, table2, gb, name, bits, max_depth);    \
        } else {                                                    \
            code = dtable[index][0];                                \
            OP(dst0, dst1, code);                                   \
            LAST_SKIP_BITS(name, gb, n);                            \
        }                                                           \
    } while (0)

#define OP8bits(dst0, dst1, code) dst0 = code >> 8; dst1 = code

#define READ_2PIX(dst0, dst1, plane1)                                  \
    UPDATE_CACHE(re, &s->gb);                                          \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4 + plane1].table,     \
                 s->vlc[0].table, s->vlc[plane1].table, VLC_BITS, 3, OP8bits)
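
/* Decode one line of 4:2:2 data into s->temp[0..2] (Y, U, V), reading the
 * samples as (Y,U) and (Y,V) pairs; pads with zeros if the bitstream runs
 * out early. */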
static void decode_422_bitstream(HYuvContext *s, int count)
{
    int i, icount;
    OPEN_READER(re, &s->gb);
    count /= 2;

    icount = get_bits_left(&s->gb) / (32 * 4);
    if (count >= icount) {
        for (i = 0; i < icount; i++) {
            READ_2PIX(s->temp[0][2 * i    ], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        for (; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i    ], s->temp[1][i], 1);
            if (BITS_LEFT(re, &s->gb) <= 0)
                break;
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        for (; i < count; i++)
            s->temp[0][2 * i    ] = s->temp[1][i] =
            s->temp[0][2 * i + 1] = s->temp[2][i] = 0;
    } else {
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i    ], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
    }
    CLOSE_READER(re, &s->gb);
}

#define READ_2PIX_PLANE(dst0, dst1, plane, OP)                        \
    UPDATE_CACHE(re, &s->gb);                                         \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4 + plane].table,     \
                 s->vlc[plane].table, s->vlc[plane].table, VLC_BITS, 3, OP)

#define OP14bits(dst0, dst1, code) dst0 = code >> 8; dst1 = sign_extend(code, 8)

/* TODO instead of restarting the read when the code isn't in the first level
 * of the joint table, jump into the 2nd level of the individual table. */
#define READ_2PIX_PLANE16(dst0, dst1, plane) {                            \
    dst0  = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;      \
    dst0 += get_bits(&s->gb, 2);                                          \
    dst1  = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;      \
    dst1 += get_bits(&s->gb, 2);                                          \
}
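
/*
 * Decode one line of a single plane into s->temp / s->temp16, choosing the
 * 8-bit, 9-14 bit or >14 bit code path depending on s->bps; an odd trailing
 * sample is decoded separately.
 */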
static void decode_plane_bitstream(HYuvContext *s, int width, int plane)
{
    int i, count = width / 2;

    if (s->bps <= 8) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        }
        if (width & 1 && BITS_LEFT(re, &s->gb) > 0) {
            unsigned int index;
            int nb_bits, code, n;
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp[0][width - 1], s->vlc[plane].table,
                       &s->gb, re, VLC_BITS, 3);
        }
        CLOSE_READER(re, &s->gb);
    } else if (s->bps <= 14) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        }
        if (width & 1 && BITS_LEFT(re, &s->gb) > 0) {
            unsigned int index;
            int nb_bits, code, n;
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp16[0][width - 1], s->vlc[plane].table,
                       &s->gb, re, VLC_BITS, 3);
        }
        CLOSE_READER(re, &s->gb);
    } else {
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        }
        if (width & 1 && get_bits_left(&s->gb) > 0) {
            int dst = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;
            s->temp16[0][width - 1] = dst + get_bits(&s->gb, 2);
        }
    }
}

static void decode_gray_bitstream(HYuvContext *s, int count)
{
    int i;
    OPEN_READER(re, &s->gb);
    count /= 2;

    if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
        for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    } else {
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    }
    CLOSE_READER(re, &s->gb);
}
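
/*
 * Decode count BGR(A) pixels into s->temp[0]. A hit in the joint table
 * yields a packed pixel straight from pix_bgr_map; otherwise the B, G, R
 * (and A) components are decoded separately, with optional green
 * decorrelation.
 */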
static av_always_inline void decode_bgr_1(HYuvContext *s, int count,
                                          int decorrelate, int alpha)
{
    int i;
    OPEN_READER(re, &s->gb);

    for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
        unsigned int index;
        int code, n, nb_bits;

        UPDATE_CACHE(re, &s->gb);
        index = SHOW_UBITS(re, &s->gb, VLC_BITS);
        n     = s->vlc[4].table[index][1];

        if (n > 0) {
            code = s->vlc[4].table[index][0];
            *(uint32_t *) &s->temp[0][4 * i] = s->pix_bgr_map[code];
            LAST_SKIP_BITS(re, &s->gb, n);
        } else {
            if (decorrelate) {
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[0].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + B] = code + s->temp[0][4 * i + G];

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[2].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + R] = code + s->temp[0][4 * i + G];
            } else {
                VLC_INTERN(s->temp[0][4 * i + B], s->vlc[0].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + R], s->vlc[2].table,
                           &s->gb, re, VLC_BITS, 3);
            }
        }
        if (alpha) {
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp[0][4 * i + A], s->vlc[2].table,
                       &s->gb, re, VLC_BITS, 3);
        } else
            s->temp[0][4 * i + A] = 0;
    }
    CLOSE_READER(re, &s->gb);
}

static void decode_bgr_bitstream(HYuvContext *s, int count)
{
    if (s->decorrelate) {
        if (s->bitstream_bpp == 24)
            decode_bgr_1(s, count, 1, 0);
        else
            decode_bgr_1(s, count, 1, 1);
    } else {
        if (s->bitstream_bpp == 24)
            decode_bgr_1(s, count, 0, 0);
        else
            decode_bgr_1(s, count, 0, 1);
    }
}
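
/* Report a finished horizontal band to the caller via draw_horiz_band(),
 * covering the rows decoded since the previous call. */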
static void draw_slice(HYuvContext *s, AVFrame *frame, int y)
{
    int h, cy, i;
    int offset[AV_NUM_DATA_POINTERS];

    if (!s->avctx->draw_horiz_band)
        return;

    h  = y - s->last_slice_end;
    y -= h;

    if (s->bitstream_bpp == 12)
        cy = y >> 1;
    else
        cy = y;

    offset[0] = frame->linesize[0] * y;
    offset[1] = frame->linesize[1] * cy;
    offset[2] = frame->linesize[2] * cy;
    for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
        offset[i] = 0;
    emms_c();

    s->avctx->draw_horiz_band(s->avctx, frame, offset, y, 3, h);

    s->last_slice_end = y + h;
}
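
/* Thin wrappers that dispatch to the 8-bit hdsp routines or the >8-bit
 * llviddsp routines depending on the stream's bit depth. */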
static int left_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
{
    if (s->bps <= 8) {
        return s->hdsp.add_hfyu_left_pred(dst, src, w, acc);
    } else {
        return s->llviddsp.add_hfyu_left_pred_int16((uint16_t *)dst, (const uint16_t *)src, s->n - 1, w, acc);
    }
}

static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
{
    if (s->bps <= 8) {
        s->hdsp.add_bytes(dst, src, w);
    } else {
        s->llviddsp.add_int16((uint16_t *)dst, (const uint16_t *)src, s->n - 1, w);
    }
}

static void add_median_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
{
    if (s->bps <= 8) {
        s->hdsp.add_hfyu_median_pred(dst, src, diff, w, left, left_top);
    } else {
        s->llviddsp.add_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src, (const uint16_t *)diff, s->n - 1, w, left, left_top);
    }
}
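
/*
 * Decode one frame: byte-swap the packet into a padded buffer, optionally
 * re-read the Huffman tables (adaptive "context" mode), then undo the
 * left/plane/median prediction per plane (version 3), per 4:2:2 or gray
 * line (version <= 2 YUV), or per BGR32 row stored bottom-up.
 */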
static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                        AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    HYuvContext *s = avctx->priv_data;
    const int width  = s->width;
    const int width2 = s->width >> 1;
    const int height = s->height;
    int fake_ystride, fake_ustride, fake_vstride;
    ThreadFrame frame = { .f = data };
    AVFrame *const p = data;
    int table_size = 0, ret;

    av_fast_padded_malloc(&s->bitstream_buffer,
                          &s->bitstream_buffer_size,
                          buf_size);
    if (!s->bitstream_buffer)
        return AVERROR(ENOMEM);

    s->bdsp.bswap_buf((uint32_t *) s->bitstream_buffer,
                      (const uint32_t *) buf, buf_size / 4);

    if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
        return ret;

    if (s->context) {
        table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
        if (table_size < 0)
            return table_size;
    }

    if ((unsigned) (buf_size - table_size) >= INT_MAX / 8)
        return AVERROR_INVALIDDATA;

    if ((ret = init_get_bits(&s->gb, s->bitstream_buffer + table_size,
                             (buf_size - table_size) * 8)) < 0)
        return ret;

    fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
    fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
    fake_vstride = s->interlaced ? p->linesize[2] * 2 : p->linesize[2];

    s->last_slice_end = 0;

    if (s->version > 2) {
        int plane;
        for (plane = 0; plane < 1 + 2 * s->chroma + s->alpha; plane++) {
            int left, lefttop, y;
            int w = width;
            int h = height;
            int fake_stride = fake_ystride;

            if (s->chroma && (plane == 1 || plane == 2)) {
                w >>= s->chroma_h_shift;
                h >>= s->chroma_v_shift;
                fake_stride = plane == 1 ? fake_ustride : fake_vstride;
            }

            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_plane_bitstream(s, w, plane);
                left = left_prediction(s, p->data[plane], s->temp[0], w, 0);

                for (y = 1; y < h; y++) {
                    uint8_t *dst = p->data[plane] + p->linesize[plane] * y;

                    decode_plane_bitstream(s, w, plane);
                    left = left_prediction(s, dst, s->temp[0], w, left);
                    if (s->predictor == PLANE) {
                        if (y > s->interlaced) {
                            add_bytes(s, dst, dst - fake_stride, w);
                        }
                    }
                }
                break;
            case MEDIAN:
                decode_plane_bitstream(s, w, plane);
                left = left_prediction(s, p->data[plane], s->temp[0], w, 0);

                y = 1;

                /* second line is left predicted for interlaced case */
                if (s->interlaced) {
                    decode_plane_bitstream(s, w, plane);
                    left = left_prediction(s, p->data[plane] + p->linesize[plane], s->temp[0], w, left);
                    y++;
                }

                lefttop = p->data[plane][0];
                decode_plane_bitstream(s, w, plane);
                add_median_prediction(s, p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
                y++;

                for (; y < h; y++) {
                    uint8_t *dst;

                    decode_plane_bitstream(s, w, plane);

                    dst = p->data[plane] + p->linesize[plane] * y;

                    add_median_prediction(s, dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
                }
                break;
            }
        }
        draw_slice(s, p, height);
    } else if (s->bitstream_bpp < 24) {
        int y, cy;
        int lefty, leftu, leftv;
        int lefttopy, lefttopu, lefttopv;

        if (s->yuy2) {
            p->data[0][3] = get_bits(&s->gb, 8);
            p->data[0][2] = get_bits(&s->gb, 8);
            p->data[0][1] = get_bits(&s->gb, 8);
            p->data[0][0] = get_bits(&s->gb, 8);

            av_log(avctx, AV_LOG_ERROR,
                   "YUY2 output is not implemented yet\n");
            return AVERROR_PATCHWELCOME;
        } else {
            leftv         =
            p->data[2][0] = get_bits(&s->gb, 8);
            lefty         =
            p->data[0][1] = get_bits(&s->gb, 8);
            leftu         =
            p->data[1][0] = get_bits(&s->gb, 8);
            p->data[0][0] = get_bits(&s->gb, 8);

            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_422_bitstream(s, width - 2);
                lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + 2, s->temp[0],
                                                   width - 2, lefty);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
                    leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
                }

                for (cy = y = 1; y < s->height; y++, cy++) {
                    uint8_t *ydst, *udst, *vdst;

                    if (s->bitstream_bpp == 12) {
                        decode_gray_bitstream(s, width);

                        ydst = p->data[0] + p->linesize[0] * y;

                        lefty = s->hdsp.add_hfyu_left_pred(ydst, s->temp[0],
                                                           width, lefty);
                        if (s->predictor == PLANE) {
                            if (y > s->interlaced)
                                s->hdsp.add_bytes(ydst, ydst - fake_ystride, width);
                        }
                        y++;
                        if (y >= s->height)
                            break;
                    }

                    draw_slice(s, p, y);

                    ydst = p->data[0] + p->linesize[0] * y;
                    udst = p->data[1] + p->linesize[1] * cy;
                    vdst = p->data[2] + p->linesize[2] * cy;

                    decode_422_bitstream(s, width);
                    lefty = s->hdsp.add_hfyu_left_pred(ydst, s->temp[0],
                                                       width, lefty);
                    if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                        leftu = s->hdsp.add_hfyu_left_pred(udst, s->temp[1], width2, leftu);
                        leftv = s->hdsp.add_hfyu_left_pred(vdst, s->temp[2], width2, leftv);
                    }
                    if (s->predictor == PLANE) {
                        if (cy > s->interlaced) {
                            s->hdsp.add_bytes(ydst, ydst - fake_ystride, width);
                            if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                                s->hdsp.add_bytes(udst, udst - fake_ustride, width2);
                                s->hdsp.add_bytes(vdst, vdst - fake_vstride, width2);
                            }
                        }
                    }
                }
                draw_slice(s, p, height);
                break;
            case MEDIAN:
                /* first line except first 2 pixels is left predicted */
                decode_422_bitstream(s, width - 2);
                lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + 2, s->temp[0],
                                                   width - 2, lefty);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
                    leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
                }

                cy = y = 1;

                /* second line is left predicted for interlaced case */
                if (s->interlaced) {
                    decode_422_bitstream(s, width);
                    lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + p->linesize[0],
                                                       s->temp[0], width, lefty);
                    if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                        leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + p->linesize[2], s->temp[1], width2, leftu);
                        leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + p->linesize[1], s->temp[2], width2, leftv);
                    }
                    y++;
                    cy++;
                }

                /* next 4 pixels are left predicted too */
                decode_422_bitstream(s, 4);
                lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + fake_ystride,
                                                   s->temp[0], 4, lefty);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
                    leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
                }

                /* next line except the first 4 pixels is median predicted */
                lefttopy = p->data[0][3];
                decode_422_bitstream(s, width - 4);
                s->hdsp.add_hfyu_median_pred(p->data[0] + fake_ystride + 4,
                                             p->data[0] + 4, s->temp[0],
                                             width - 4, &lefty, &lefttopy);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    lefttopu = p->data[1][1];
                    lefttopv = p->data[2][1];
                    s->hdsp.add_hfyu_median_pred(p->data[1] + fake_ustride + 2, p->data[1] + 2, s->temp[1], width2 - 2, &leftu, &lefttopu);
                    s->hdsp.add_hfyu_median_pred(p->data[2] + fake_vstride + 2, p->data[2] + 2, s->temp[2], width2 - 2, &leftv, &lefttopv);
                }
                y++;
                cy++;

                for (; y < height; y++, cy++) {
                    uint8_t *ydst, *udst, *vdst;

                    if (s->bitstream_bpp == 12) {
                        while (2 * cy > y) {
                            decode_gray_bitstream(s, width);
                            ydst = p->data[0] + p->linesize[0] * y;
                            s->hdsp.add_hfyu_median_pred(ydst, ydst - fake_ystride,
                                                         s->temp[0], width,
                                                         &lefty, &lefttopy);
                            y++;
                        }
                        if (y >= height)
                            break;
                    }
                    draw_slice(s, p, y);

                    decode_422_bitstream(s, width);

                    ydst = p->data[0] + p->linesize[0] * y;
                    udst = p->data[1] + p->linesize[1] * cy;
                    vdst = p->data[2] + p->linesize[2] * cy;

                    s->hdsp.add_hfyu_median_pred(ydst, ydst - fake_ystride,
                                                 s->temp[0], width,
                                                 &lefty, &lefttopy);
                    if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                        s->hdsp.add_hfyu_median_pred(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
                        s->hdsp.add_hfyu_median_pred(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
                    }
                }

                draw_slice(s, p, height);
                break;
            }
        }
    } else {
        int y;
        uint8_t left[4];
        const int last_line = (height - 1) * p->linesize[0];

        if (s->bitstream_bpp == 32) {
            left[A] = p->data[0][last_line + A] = get_bits(&s->gb, 8);
            left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
            left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
            left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
        } else {
            left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
            left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
            left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
            left[A] = p->data[0][last_line + A] = 255;
            skip_bits(&s->gb, 8);
        }

        if (s->bgr32) {
            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_bgr_bitstream(s, width - 1);
                s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + last_line + 4,
                                                 s->temp[0], width - 1, left);

                for (y = s->height - 2; y >= 0; y--) { // Yes it is stored upside down.
                    decode_bgr_bitstream(s, width);

                    s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + p->linesize[0] * y,
                                                     s->temp[0], width, left);
                    if (s->predictor == PLANE) {
                        if (s->bitstream_bpp != 32)
                            left[A] = 0;
                        if (y < s->height - 1 - s->interlaced) {
                            s->hdsp.add_bytes(p->data[0] + p->linesize[0] * y,
                                              p->data[0] + p->linesize[0] * y +
                                              fake_ystride, 4 * width);
                        }
                    }
                }
                // just 1 large slice as this is not possible in reverse order
                draw_slice(s, p, height);
                break;
            default:
                av_log(avctx, AV_LOG_ERROR,
                       "prediction type not supported!\n");
            }
        } else {
            av_log(avctx, AV_LOG_ERROR,
                   "BGR24 output is not implemented yet\n");
            return AVERROR_PATCHWELCOME;
        }
    }
    emms_c();

    *got_frame = 1;

    return (get_bits_count(&s->gb) + 31) / 32 * 4 + table_size;
}

AVCodec ff_huffyuv_decoder = {
    .name             = "huffyuv",
    .long_name        = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_HUFFYUV,
    .priv_data_size   = sizeof(HYuvContext),
    .init             = decode_init,
    .close            = decode_end,
    .decode           = decode_frame,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
                        AV_CODEC_CAP_FRAME_THREADS,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
};

#if CONFIG_FFVHUFF_DECODER
AVCodec ff_ffvhuff_decoder = {
    .name             = "ffvhuff",
    .long_name        = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_FFVHUFF,
    .priv_data_size   = sizeof(HYuvContext),
    .init             = decode_init,
    .close            = decode_end,
    .decode           = decode_frame,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
                        AV_CODEC_CAP_FRAME_THREADS,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
};
#endif /* CONFIG_FFVHUFF_DECODER */