/*
 * huffyuv decoder
 *
 * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
 *
 * see http://www.pcisys.net/~melanson/codecs/huffyuv.txt for a description of
 * the algorithm used
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
 */

/**
 * @file
 * huffyuv decoder
 */

#define UNCHECKED_BITSTREAM_READER 1

#include "avcodec.h"
#include "get_bits.h"
#include "huffyuv.h"
#include "huffyuvdsp.h"
#include "lossless_videodsp.h"
#include "thread.h"
#include "libavutil/imgutils.h"
#include "libavutil/pixdesc.h"

#define classic_shift_luma_table_size 42
static const unsigned char classic_shift_luma[classic_shift_luma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
    34, 36, 35, 69, 135, 232, 9, 16, 10, 24, 11, 23, 12, 16, 13, 10,
    14, 8, 15, 8, 16, 8, 17, 20, 16, 10, 207, 206, 205, 236, 11, 8,
    10, 21, 9, 23, 8, 8, 199, 70, 69, 68, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
};

#define classic_shift_chroma_table_size 59
static const unsigned char classic_shift_chroma[classic_shift_chroma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
    66, 36, 37, 38, 39, 40, 41, 75, 76, 77, 110, 239, 144, 81, 82, 83,
    84, 85, 118, 183, 56, 57, 88, 89, 56, 89, 154, 57, 58, 57, 26, 141,
    57, 56, 58, 57, 58, 57, 184, 119, 214, 245, 116, 83, 82, 49, 80, 79,
    78, 77, 44, 75, 41, 40, 39, 38, 37, 36, 34, 0,
    0, 0, 0, 0, 0, 0, 0, 0,
};

static const unsigned char classic_add_luma[256] = {
     3,  9,  5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
    73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
    68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
    35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
    37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
    35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
    27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
    15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
    12, 17, 19, 13,  4,  9,  2, 11,  1,  7,  8,  0, 16,  3, 14,  6,
    12, 10,  5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
    18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
    28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
    28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
    62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
    54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
    46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13,  7,  8,
};

static const unsigned char classic_add_chroma[256] = {
      3,   1,   2,   2,   2,   2,   3,   3,   7,   5,   7,   5,   8,   6,  11,   9,
      7,  13,  11,  10,   9,   8,   7,   5,   9,   7,   6,   4,   7,   5,   8,   7,
     11,   8,  13,  11,  19,  15,  22,  23,  20,  33,  32,  28,  27,  29,  51,  77,
     43,  45,  76,  81,  46,  82,  75,  55,  56, 144,  58,  80,  60,  74, 147,  63,
    143,  65,  66,  67,  68,  69,  70,  71,  72,  73,  74,  75,  76,  77,  78,  79,
     80,  81,  82,  83,  84,  85,  86,  87,  88,  89,  90,  91,  27,  30,  21,  22,
     17,  14,   5,   6, 100,  54,  47,  50,  51,  53, 106, 107, 108, 109, 110, 111,
    112, 113, 114, 115,   4, 117, 118,  92,  94, 121, 122,   3, 124, 103,   2,   1,
      0, 129, 130, 131, 120, 119, 126, 125, 136, 137, 138, 139, 140, 141, 142, 134,
    135, 132, 133, 104,  64, 101,  62,  57, 102,  95,  93,  59,  61,  28,  97,  96,
     52,  49,  48,  29,  32,  25,  24,  46,  23,  98,  45,  44,  43,  20,  42,  41,
     19,  18,  99,  40,  15,  39,  38,  16,  13,  12,  11,  37,  10,   9,   8,  36,
      7, 128, 127, 105, 123, 116,  35,  34,  33, 145,  31,  79,  42, 146,  78,  26,
     83,  48,  49,  50,  44,  47,  26,  31,  30,  18,  17,  19,  21,  24,  25,  13,
     14,  16,  17,  18,  20,  21,  12,  14,  15,   9,  10,   6,   9,   6,   5,   8,
      6,  12,   8,  10,   7,   9,   6,   4,   6,   2,   2,   3,   3,   3,   3,   2,
};
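
/* Code lengths are stored run-length coded: a 3-bit repeat count followed by
 * a 5-bit length value; a repeat count of 0 escapes to an explicit 8-bit
 * count. */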
static int read_len_table(uint8_t *dst, GetBitContext *gb, int n)
{
    int i, val, repeat;

    for (i = 0; i < n;) {
        repeat = get_bits(gb, 3);
        val    = get_bits(gb, 5);
        if (repeat == 0)
            repeat = get_bits(gb, 8);
        if (i + repeat > n || get_bits_left(gb) < 0) {
            av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
            return AVERROR_INVALIDDATA;
        }
        while (repeat--)
            dst[i++] = val;
    }
    return 0;
}
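
/* Build the "joint" VLC tables that decode two symbols per lookup (or, for
 * the packed RGB path, a whole B,G,R triplet via pix_bgr_map) whenever the
 * combined code fits in VLC_BITS. */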
static int generate_joint_tables(HYuvContext *s)
{
    int ret;
    uint16_t *symbols = av_mallocz(5 << VLC_BITS);
    uint16_t *bits;
    uint8_t *len;

    if (!symbols)
        return AVERROR(ENOMEM);
    bits = symbols + (1 << VLC_BITS);
    len  = (uint8_t *)(bits + (1 << VLC_BITS));

    if (s->bitstream_bpp < 24 || s->version > 2) {
        int p, i, y, u;
        for (p = 0; p < 4; p++) {
            int p0 = s->version > 2 ? p : 0;
            for (i = y = 0; y < s->vlc_n; y++) {
                int len0  = s->len[p0][y];
                int limit = VLC_BITS - len0;
                if (limit <= 0 || !len0)
                    continue;
                if ((sign_extend(y, 8) & (s->vlc_n - 1)) != y)
                    continue;
                for (u = 0; u < s->vlc_n; u++) {
                    int len1 = s->len[p][u];
                    if (len1 > limit || !len1)
                        continue;
                    if ((sign_extend(u, 8) & (s->vlc_n - 1)) != u)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]     = len0 + len1;
                    bits[i]    = (s->bits[p0][y] << len1) + s->bits[p][u];
                    symbols[i] = (y << 8) + (u & 0xFF);
                    i++;
                }
            }
            ff_free_vlc(&s->vlc[4 + p]);
            if ((ret = ff_init_vlc_sparse(&s->vlc[4 + p], VLC_BITS, i, len, 1, 1,
                                          bits, 2, 2, symbols, 2, 2, 0)) < 0)
                goto out;
        }
    } else {
        uint8_t (*map)[4] = (uint8_t(*)[4]) s->pix_bgr_map;
        int i, b, g, r, code;
        int p0 = s->decorrelate;
        int p1 = !s->decorrelate;
        /* Restrict the range to +/-16 because that's pretty much guaranteed
         * to cover all the combinations that fit in 11 bits total, and it
         * does not matter if we miss a few rare codes. */
        for (i = 0, g = -16; g < 16; g++) {
            int len0   = s->len[p0][g & 255];
            int limit0 = VLC_BITS - len0;
            if (limit0 < 2 || !len0)
                continue;
            for (b = -16; b < 16; b++) {
                int len1   = s->len[p1][b & 255];
                int limit1 = limit0 - len1;
                if (limit1 < 1 || !len1)
                    continue;
                code = (s->bits[p0][g & 255] << len1) + s->bits[p1][b & 255];
                for (r = -16; r < 16; r++) {
                    int len2 = s->len[2][r & 255];
                    if (len2 > limit1 || !len2)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]  = len0 + len1 + len2;
                    bits[i] = (code << len2) + s->bits[2][r & 255];
                    if (s->decorrelate) {
                        map[i][G] = g;
                        map[i][B] = g + b;
                        map[i][R] = g + r;
                    } else {
                        map[i][B] = g;
                        map[i][G] = b;
                        map[i][R] = r;
                    }
                    i++;
                }
            }
        }
        ff_free_vlc(&s->vlc[4]);
        if ((ret = init_vlc(&s->vlc[4], VLC_BITS, i, len, 1, 1,
                            bits, 2, 2, 0)) < 0)
            goto out;
    }
    ret = 0;
out:
    av_freep(&symbols);
    return ret;
}
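
/* Parse the per-plane code-length tables (from extradata, or from the frame
 * header when context coding is enabled), derive the code bits and build the
 * per-plane and joint VLCs. Returns the number of bytes consumed. */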
static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length)
{
    GetBitContext gb;
    int i, ret;
    int count = 3;

    if ((ret = init_get_bits(&gb, src, length * 8)) < 0)
        return ret;

    if (s->version > 2)
        count = 1 + s->alpha + 2 * s->chroma;

    for (i = 0; i < count; i++) {
        if ((ret = read_len_table(s->len[i], &gb, s->vlc_n)) < 0)
            return ret;
        if ((ret = ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n)) < 0)
            return ret;
        ff_free_vlc(&s->vlc[i]);
        if ((ret = init_vlc(&s->vlc[i], VLC_BITS, s->vlc_n, s->len[i], 1, 1,
                            s->bits[i], 4, 4, 0)) < 0)
            return ret;
    }

    if ((ret = generate_joint_tables(s)) < 0)
        return ret;

    return (get_bits_count(&gb) + 7) / 8;
}
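
/* "Classic" huffyuv streams (version 0/1) carry no tables of their own; the
 * built-in classic_* tables above are used instead. */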
static int read_old_huffman_tables(HYuvContext *s)
{
    GetBitContext gb;
    int i, ret;

    init_get_bits(&gb, classic_shift_luma,
                  classic_shift_luma_table_size * 8);
    if ((ret = read_len_table(s->len[0], &gb, 256)) < 0)
        return ret;

    init_get_bits(&gb, classic_shift_chroma,
                  classic_shift_chroma_table_size * 8);
    if ((ret = read_len_table(s->len[1], &gb, 256)) < 0)
        return ret;

    for (i = 0; i < 256; i++)
        s->bits[0][i] = classic_add_luma[i];
    for (i = 0; i < 256; i++)
        s->bits[1][i] = classic_add_chroma[i];

    if (s->bitstream_bpp >= 24) {
        memcpy(s->bits[1], s->bits[0], 256 * sizeof(uint32_t));
        memcpy(s->len[1],  s->len[0],  256 * sizeof(uint8_t));
    }
    memcpy(s->bits[2], s->bits[1], 256 * sizeof(uint32_t));
    memcpy(s->len[2],  s->len[1],  256 * sizeof(uint8_t));

    for (i = 0; i < 4; i++) {
        ff_free_vlc(&s->vlc[i]);
        if ((ret = init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
                            s->bits[i], 4, 4, 0)) < 0)
            return ret;
    }

    if ((ret = generate_joint_tables(s)) < 0)
        return ret;

    return 0;
}

static av_cold int decode_end(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int i;

    ff_huffyuv_common_end(s);
    av_freep(&s->bitstream_buffer);

    for (i = 0; i < 8; i++)
        ff_free_vlc(&s->vlc[i]);

    return 0;
}
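
/* Stream version detection: no extradata means a version-0 (classic) stream,
 * a bits_per_coded_sample that is neither a multiple of 8 nor 12 marks
 * version 1, extradata[3] == 0 marks version 2, anything else version 3. */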
static av_cold int decode_init(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int ret;

    ret = av_image_check_size(avctx->width, avctx->height, 0, avctx);
    if (ret < 0)
        return ret;

    ff_huffyuvdsp_init(&s->hdsp, avctx->pix_fmt);
    ff_llviddsp_init(&s->llviddsp);
    memset(s->vlc, 0, 4 * sizeof(VLC));

    s->interlaced = avctx->height > 288;
    s->bgr32      = 1;

    if (avctx->extradata_size) {
        if ((avctx->bits_per_coded_sample & 7) &&
            avctx->bits_per_coded_sample != 12)
            s->version = 1; // do such files exist at all?
        else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
            s->version = 2;
        else
            s->version = 3;
    } else
        s->version = 0;

    s->bps    = 8;
    s->n      = 1 << s->bps;
    s->vlc_n  = FFMIN(s->n, MAX_VLC_N);
    s->chroma = 1;

    if (s->version >= 2) {
        int method, interlace;

        if (avctx->extradata_size < 4)
            return AVERROR_INVALIDDATA;

        method         = avctx->extradata[0];
        s->decorrelate = method & 64 ? 1 : 0;
        s->predictor   = method & 63;
        if (s->version == 2) {
            s->bitstream_bpp = avctx->extradata[1];
            if (s->bitstream_bpp == 0)
                s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
        } else {
            s->bps   = (avctx->extradata[1] >> 4) + 1;
            s->n     = 1 << s->bps;
            s->vlc_n = FFMIN(s->n, MAX_VLC_N);
            s->chroma_h_shift = avctx->extradata[1] & 3;
            s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
            s->yuv    = !!(avctx->extradata[2] & 1);
            s->chroma = !!(avctx->extradata[2] & 3);
            s->alpha  = !!(avctx->extradata[2] & 4);
        }
        interlace     = (avctx->extradata[2] & 0x30) >> 4;
        s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
        s->context    = avctx->extradata[2] & 0x40 ? 1 : 0;

        if ((ret = read_huffman_tables(s, avctx->extradata + 4,
                                       avctx->extradata_size - 4)) < 0)
            goto error;
    } else {
        switch (avctx->bits_per_coded_sample & 7) {
        case 1:
            s->predictor   = LEFT;
            s->decorrelate = 0;
            break;
        case 2:
            s->predictor   = LEFT;
            s->decorrelate = 1;
            break;
        case 3:
            s->predictor   = PLANE;
            s->decorrelate = avctx->bits_per_coded_sample >= 24;
            break;
        case 4:
            s->predictor   = MEDIAN;
            s->decorrelate = 0;
            break;
        default:
            s->predictor   = LEFT; // OLD
            s->decorrelate = 0;
            break;
        }
        s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
        s->context       = 0;

        if ((ret = read_old_huffman_tables(s)) < 0)
            goto error;
    }

    if (s->version <= 2) {
        switch (s->bitstream_bpp) {
        case 12:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P;
            s->yuv = 1;
            break;
        case 16:
            if (s->yuy2)
                avctx->pix_fmt = AV_PIX_FMT_YUYV422;
            else
                avctx->pix_fmt = AV_PIX_FMT_YUV422P;
            s->yuv = 1;
            break;
        case 24:
            if (s->bgr32)
                avctx->pix_fmt = AV_PIX_FMT_0RGB32;
            else
                avctx->pix_fmt = AV_PIX_FMT_BGR24;
            break;
        case 32:
            av_assert0(s->bgr32);
            avctx->pix_fmt = AV_PIX_FMT_RGB32;
            s->alpha = 1;
            break;
        default:
            ret = AVERROR_INVALIDDATA;
            goto error;
        }
        av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
                                         &s->chroma_h_shift,
                                         &s->chroma_v_shift);
    } else {
        switch ((s->chroma << 10) | (s->yuv << 9) | (s->alpha << 8) |
                ((s->bps - 1) << 4) | s->chroma_h_shift | (s->chroma_v_shift << 2)) {
        case 0x070:
            avctx->pix_fmt = AV_PIX_FMT_GRAY8;
            break;
        case 0x0F0:
            avctx->pix_fmt = AV_PIX_FMT_GRAY16;
            break;
        case 0x170:
            avctx->pix_fmt = AV_PIX_FMT_GRAY8A;
            break;
        case 0x470:
            avctx->pix_fmt = AV_PIX_FMT_GBRP;
            break;
        case 0x480:
            avctx->pix_fmt = AV_PIX_FMT_GBRP9;
            break;
        case 0x490:
            avctx->pix_fmt = AV_PIX_FMT_GBRP10;
            break;
        case 0x4B0:
            avctx->pix_fmt = AV_PIX_FMT_GBRP12;
            break;
        case 0x4D0:
            avctx->pix_fmt = AV_PIX_FMT_GBRP14;
            break;
        case 0x4F0:
            avctx->pix_fmt = AV_PIX_FMT_GBRP16;
            break;
        case 0x570:
            avctx->pix_fmt = AV_PIX_FMT_GBRAP;
            break;
        case 0x670:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P;
            break;
        case 0x680:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
            break;
        case 0x690:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
            break;
        case 0x6B0:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
            break;
        case 0x6D0:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
            break;
        case 0x6F0:
            avctx->pix_fmt = AV_PIX_FMT_YUV444P16;
            break;
        case 0x671:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P;
            break;
        case 0x681:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
            break;
        case 0x691:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
            break;
        case 0x6B1:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
            break;
        case 0x6D1:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
            break;
        case 0x6F1:
            avctx->pix_fmt = AV_PIX_FMT_YUV422P16;
            break;
        case 0x672:
            avctx->pix_fmt = AV_PIX_FMT_YUV411P;
            break;
        case 0x674:
            avctx->pix_fmt = AV_PIX_FMT_YUV440P;
            break;
        case 0x675:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P;
            break;
        case 0x685:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
            break;
        case 0x695:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
            break;
        case 0x6B5:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
            break;
        case 0x6D5:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
            break;
        case 0x6F5:
            avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
            break;
        case 0x67A:
            avctx->pix_fmt = AV_PIX_FMT_YUV410P;
            break;
        case 0x770:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
            break;
        case 0x780:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P9;
            break;
        case 0x790:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P10;
            break;
        case 0x7F0:
            avctx->pix_fmt = AV_PIX_FMT_YUVA444P16;
            break;
        case 0x771:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
            break;
        case 0x781:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P9;
            break;
        case 0x791:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P10;
            break;
        case 0x7F1:
            avctx->pix_fmt = AV_PIX_FMT_YUVA422P16;
            break;
        case 0x775:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P;
            break;
        case 0x785:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P9;
            break;
        case 0x795:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P10;
            break;
        case 0x7F5:
            avctx->pix_fmt = AV_PIX_FMT_YUVA420P16;
            break;
        default:
            ret = AVERROR_INVALIDDATA;
            goto error;
        }
    }

    ff_huffyuv_common_init(avctx);

    if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
        av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
        ret = AVERROR_INVALIDDATA;
        goto error;
    }
    if (s->predictor == MEDIAN && avctx->pix_fmt == AV_PIX_FMT_YUV422P &&
        avctx->width % 4) {
        av_log(avctx, AV_LOG_ERROR, "width must be a multiple of 4 "
               "for this combination of colorspace and predictor type.\n");
        ret = AVERROR_INVALIDDATA;
        goto error;
    }

    if ((ret = ff_huffyuv_alloc_temp(s)) < 0) {
        ff_huffyuv_common_end(s);
        goto error;
    }

    return 0;
error:
    decode_end(avctx);
    return ret;
}
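
/* With frame threading each decoder copy allocates its own temporary buffers
 * and rebuilds its VLC tables from the shared extradata. */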
#if HAVE_THREADS
static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
{
    HYuvContext *s = avctx->priv_data;
    int i, ret;

    s->avctx = avctx;

    if ((ret = ff_huffyuv_alloc_temp(s)) < 0) {
        ff_huffyuv_common_end(s);
        return ret;
    }

    for (i = 0; i < 8; i++)
        s->vlc[i].table = NULL;

    if (s->version >= 2) {
        if ((ret = read_huffman_tables(s, avctx->extradata + 4,
                                       avctx->extradata_size)) < 0)
            return ret;
    } else {
        if ((ret = read_old_huffman_tables(s)) < 0)
            return ret;
    }

    return 0;
}
#endif

/** Subset of GET_VLC for use in hand-rolled VLC code */
#define VLC_INTERN(dst, table, gb, name, bits, max_depth)   \
    code = table[index][0];                                 \
    n    = table[index][1];                                 \
    if (max_depth > 1 && n < 0) {                           \
        LAST_SKIP_BITS(name, gb, bits);                     \
        UPDATE_CACHE(name, gb);                             \
                                                            \
        nb_bits = -n;                                       \
        index   = SHOW_UBITS(name, gb, nb_bits) + code;     \
        code    = table[index][0];                          \
        n       = table[index][1];                          \
        if (max_depth > 2 && n < 0) {                       \
            LAST_SKIP_BITS(name, gb, nb_bits);              \
            UPDATE_CACHE(name, gb);                         \
                                                            \
            nb_bits = -n;                                   \
            index   = SHOW_UBITS(name, gb, nb_bits) + code; \
            code    = table[index][0];                      \
            n       = table[index][1];                      \
        }                                                   \
    }                                                       \
    dst = code;                                             \
    LAST_SKIP_BITS(name, gb, n)
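
/* Decode two symbols at once: try the joint table first and fall back to two
 * single-symbol lookups (via VLC_INTERN) when the pair is not in it. */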
#define GET_VLC_DUAL(dst0, dst1, name, gb, dtable, table1, table2, \
                     bits, max_depth, OP)                          \
    do {                                                           \
        unsigned int index = SHOW_UBITS(name, gb, bits);           \
        int          code, n = dtable[index][1];                   \
                                                                   \
        if (n <= 0) {                                              \
            int nb_bits;                                           \
            VLC_INTERN(dst0, table1, gb, name, bits, max_depth);   \
                                                                   \
            UPDATE_CACHE(re, gb);                                  \
            index = SHOW_UBITS(name, gb, bits);                    \
            VLC_INTERN(dst1, table2, gb, name, bits, max_depth);   \
        } else {                                                   \
            code = dtable[index][0];                               \
            OP(dst0, dst1, code);                                  \
            LAST_SKIP_BITS(name, gb, n);                           \
        }                                                          \
    } while (0)

#define OP8bits(dst0, dst1, code) dst0 = code >> 8; dst1 = code

#define READ_2PIX(dst0, dst1, plane1)                              \
    UPDATE_CACHE(re, &s->gb);                                      \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4 + plane1].table, \
                 s->vlc[0].table, s->vlc[plane1].table, VLC_BITS, 3, OP8bits)
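
/* Decode one 4:2:2 line worth of samples into s->temp[0..2]; if the bitstream
 * runs out early, the remaining samples are zeroed instead of over-reading. */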
static void decode_422_bitstream(HYuvContext *s, int count)
{
    int i, icount;
    OPEN_READER(re, &s->gb);
    count /= 2;

    icount = get_bits_left(&s->gb) / (32 * 4);
    if (count >= icount) {
        for (i = 0; i < icount; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        for (; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            if (BITS_LEFT(re, &s->gb) <= 0)
                break;
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        for (; i < count; i++)
            s->temp[0][2 * i    ] = s->temp[1][i] =
            s->temp[0][2 * i + 1] = s->temp[2][i] = 0;
    } else {
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
    }
    CLOSE_READER(re, &s->gb);
}

#define READ_2PIX_PLANE(dst0, dst1, plane, OP)                    \
    UPDATE_CACHE(re, &s->gb);                                     \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4 + plane].table, \
                 s->vlc[plane].table, s->vlc[plane].table, VLC_BITS, 3, OP)

#define OP14bits(dst0, dst1, code) dst0 = code >> 8; dst1 = sign_extend(code, 8)

/* TODO instead of restarting the read when the code isn't in the first level
 * of the joint table, jump into the 2nd level of the individual table. */
#define READ_2PIX_PLANE16(dst0, dst1, plane) {                       \
    dst0  = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2; \
    dst0 += get_bits(&s->gb, 2);                                     \
    dst1  = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2; \
    dst1 += get_bits(&s->gb, 2);                                     \
}
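
/* Decode one line of a single plane. Up to 8 bits per sample uses the 8-bit
 * joint tables, up to 14 bits the sign-extending variant; above that each
 * sample is coded as a VLC for the high bits plus 2 raw low bits
 * (READ_2PIX_PLANE16). */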
static void decode_plane_bitstream(HYuvContext *s, int width, int plane)
{
    int i, count = width / 2;

    if (s->bps <= 8) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        }
        if (width & 1 && BITS_LEFT(re, &s->gb) > 0) {
            unsigned int index;
            int nb_bits, code, n;
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp[0][width - 1], s->vlc[plane].table,
                       &s->gb, re, VLC_BITS, 3);
        }
        CLOSE_READER(re, &s->gb);
    } else if (s->bps <= 14) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        }
        if (width & 1 && BITS_LEFT(re, &s->gb) > 0) {
            unsigned int index;
            int nb_bits, code, n;
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp16[0][width - 1], s->vlc[plane].table,
                       &s->gb, re, VLC_BITS, 3);
        }
        CLOSE_READER(re, &s->gb);
    } else {
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        }
        if (width & 1 && get_bits_left(&s->gb) > 0) {
            int dst = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;
            s->temp16[0][width - 1] = dst + get_bits(&s->gb, 2);
        }
    }
}

static void decode_gray_bitstream(HYuvContext *s, int count)
{
    int i;
    OPEN_READER(re, &s->gb);
    count /= 2;

    if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
        for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    } else {
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    }
    CLOSE_READER(re, &s->gb);
}
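
/* Packed RGB path: the joint table in s->vlc[4] yields a complete B,G,R
 * triplet via pix_bgr_map when the combined code is short enough; otherwise
 * the components are decoded separately, optionally undoing the green
 * decorrelation, and alpha is read or zeroed as appropriate. */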
static av_always_inline void decode_bgr_1(HYuvContext *s, int count,
                                          int decorrelate, int alpha)
{
    int i;
    OPEN_READER(re, &s->gb);

    for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
        unsigned int index;
        int code, n, nb_bits;

        UPDATE_CACHE(re, &s->gb);
        index = SHOW_UBITS(re, &s->gb, VLC_BITS);
        n     = s->vlc[4].table[index][1];

        if (n > 0) {
            code = s->vlc[4].table[index][0];
            *(uint32_t *) &s->temp[0][4 * i] = s->pix_bgr_map[code];
            LAST_SKIP_BITS(re, &s->gb, n);
        } else {
            if (decorrelate) {
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[0].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + B] = code + s->temp[0][4 * i + G];

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[2].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + R] = code + s->temp[0][4 * i + G];
            } else {
                VLC_INTERN(s->temp[0][4 * i + B], s->vlc[0].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + R], s->vlc[2].table,
                           &s->gb, re, VLC_BITS, 3);
            }
        }
        if (alpha) {
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp[0][4 * i + A], s->vlc[2].table,
                       &s->gb, re, VLC_BITS, 3);
        } else
            s->temp[0][4 * i + A] = 0;
    }
    CLOSE_READER(re, &s->gb);
}

static void decode_bgr_bitstream(HYuvContext *s, int count)
{
    if (s->decorrelate) {
        if (s->bitstream_bpp == 24)
            decode_bgr_1(s, count, 1, 0);
        else
            decode_bgr_1(s, count, 1, 1);
    } else {
        if (s->bitstream_bpp == 24)
            decode_bgr_1(s, count, 0, 0);
        else
            decode_bgr_1(s, count, 0, 1);
    }
}
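
/* Hand completed rows to the user via the draw_horiz_band callback, halving
 * the chroma offsets for 4:2:0 (bitstream_bpp == 12). */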
static void draw_slice(HYuvContext *s, AVFrame *frame, int y)
{
    int h, cy, i;
    int offset[AV_NUM_DATA_POINTERS];

    if (!s->avctx->draw_horiz_band)
        return;

    h  = y - s->last_slice_end;
    y -= h;

    if (s->bitstream_bpp == 12)
        cy = y >> 1;
    else
        cy = y;

    offset[0] = frame->linesize[0] * y;
    offset[1] = frame->linesize[1] * cy;
    offset[2] = frame->linesize[2] * cy;
    for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
        offset[i] = 0;
    emms_c();

    s->avctx->draw_horiz_band(s->avctx, frame, offset, y, 3, h);

    s->last_slice_end = y + h;
}
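
/* Bit-depth dispatch wrappers: route to the 8-bit llviddsp primitives or to
 * the 16-bit huffyuvdsp variants, masking with s->n - 1. */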
static int left_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
{
    if (s->bps <= 8) {
        return s->llviddsp.add_left_pred(dst, src, w, acc);
    } else {
        return s->llviddsp.add_left_pred_int16((uint16_t *)dst, (const uint16_t *)src, s->n - 1, w, acc);
    }
}

static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
{
    if (s->bps <= 8) {
        s->llviddsp.add_bytes(dst, src, w);
    } else {
        s->hdsp.add_int16((uint16_t*)dst, (const uint16_t*)src, s->n - 1, w);
    }
}

static void add_median_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
{
    if (s->bps <= 8) {
        s->llviddsp.add_median_pred(dst, src, diff, w, left, left_top);
    } else {
        s->hdsp.add_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src, (const uint16_t *)diff, s->n - 1, w, left, left_top);
    }
}
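
/* Frame decoding: the packet is byteswapped into bitstream_buffer, in-band
 * Huffman tables are read first when context coding is enabled, and the image
 * is then reconstructed per predictor (LEFT/PLANE/MEDIAN). RGB frames are
 * stored bottom-up, so that path walks the rows in reverse. */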
static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                        AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    HYuvContext *s = avctx->priv_data;
    const int width  = s->width;
    const int width2 = s->width >> 1;
    const int height = s->height;
    int fake_ystride, fake_ustride, fake_vstride;
    ThreadFrame frame = { .f = data };
    AVFrame *const p = data;
    int table_size = 0, ret;

    if (buf_size < (width * height + 7) / 8)
        return AVERROR_INVALIDDATA;

    av_fast_padded_malloc(&s->bitstream_buffer,
                          &s->bitstream_buffer_size,
                          buf_size);
    if (!s->bitstream_buffer)
        return AVERROR(ENOMEM);

    s->bdsp.bswap_buf((uint32_t *) s->bitstream_buffer,
                      (const uint32_t *) buf, buf_size / 4);

    if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
        return ret;

    if (s->context) {
        table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
        if (table_size < 0)
            return table_size;
    }

    if ((unsigned) (buf_size - table_size) >= INT_MAX / 8)
        return AVERROR_INVALIDDATA;

    if ((ret = init_get_bits(&s->gb, s->bitstream_buffer + table_size,
                             (buf_size - table_size) * 8)) < 0)
        return ret;

    fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
    fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
    fake_vstride = s->interlaced ? p->linesize[2] * 2 : p->linesize[2];

    s->last_slice_end = 0;

    if (s->version > 2) {
        int plane;
        for (plane = 0; plane < 1 + 2 * s->chroma + s->alpha; plane++) {
            int left, lefttop, y;
            int w = width;
            int h = height;
            int fake_stride = fake_ystride;

            if (s->chroma && (plane == 1 || plane == 2)) {
                w >>= s->chroma_h_shift;
                h >>= s->chroma_v_shift;
                fake_stride = plane == 1 ? fake_ustride : fake_vstride;
            }

            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_plane_bitstream(s, w, plane);
                left = left_prediction(s, p->data[plane], s->temp[0], w, 0);

                for (y = 1; y < h; y++) {
                    uint8_t *dst = p->data[plane] + p->linesize[plane] * y;

                    decode_plane_bitstream(s, w, plane);
                    left = left_prediction(s, dst, s->temp[0], w, left);
                    if (s->predictor == PLANE) {
                        if (y > s->interlaced) {
                            add_bytes(s, dst, dst - fake_stride, w);
                        }
                    }
                }
                break;
            case MEDIAN:
                decode_plane_bitstream(s, w, plane);
                left = left_prediction(s, p->data[plane], s->temp[0], w, 0);

                y = 1;

                /* second line is left predicted for interlaced case */
                if (s->interlaced) {
                    decode_plane_bitstream(s, w, plane);
                    left = left_prediction(s, p->data[plane] + p->linesize[plane], s->temp[0], w, left);
                    y++;
                }

                lefttop = p->data[plane][0];
                decode_plane_bitstream(s, w, plane);
                add_median_prediction(s, p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
                y++;

                for (; y < h; y++) {
                    uint8_t *dst;

                    decode_plane_bitstream(s, w, plane);
                    dst = p->data[plane] + p->linesize[plane] * y;
                    add_median_prediction(s, dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
                }
                break;
            }
        }
        draw_slice(s, p, height);
    } else if (s->bitstream_bpp < 24) {
        int y, cy;
        int lefty, leftu, leftv;
        int lefttopy, lefttopu, lefttopv;

        if (s->yuy2) {
            p->data[0][3] = get_bits(&s->gb, 8);
            p->data[0][2] = get_bits(&s->gb, 8);
            p->data[0][1] = get_bits(&s->gb, 8);
            p->data[0][0] = get_bits(&s->gb, 8);

            av_log(avctx, AV_LOG_ERROR,
                   "YUY2 output is not implemented yet\n");
            return AVERROR_PATCHWELCOME;
        } else {
            leftv         =
            p->data[2][0] = get_bits(&s->gb, 8);
            lefty         =
            p->data[0][1] = get_bits(&s->gb, 8);
            leftu         =
            p->data[1][0] = get_bits(&s->gb, 8);
            p->data[0][0] = get_bits(&s->gb, 8);

            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_422_bitstream(s, width - 2);
                lefty = s->llviddsp.add_left_pred(p->data[0] + 2, s->temp[0],
                                                  width - 2, lefty);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    leftu = s->llviddsp.add_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
                    leftv = s->llviddsp.add_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
                }

                for (cy = y = 1; y < s->height; y++, cy++) {
                    uint8_t *ydst, *udst, *vdst;

                    if (s->bitstream_bpp == 12) {
                        decode_gray_bitstream(s, width);

                        ydst = p->data[0] + p->linesize[0] * y;

                        lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
                                                          width, lefty);
                        if (s->predictor == PLANE) {
                            if (y > s->interlaced)
                                s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
                        }
                        y++;
                        if (y >= s->height)
                            break;
                    }

                    draw_slice(s, p, y);

                    ydst = p->data[0] + p->linesize[0] * y;
                    udst = p->data[1] + p->linesize[1] * cy;
                    vdst = p->data[2] + p->linesize[2] * cy;

                    decode_422_bitstream(s, width);
                    lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
                                                      width, lefty);
                    if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                        leftu = s->llviddsp.add_left_pred(udst, s->temp[1], width2, leftu);
                        leftv = s->llviddsp.add_left_pred(vdst, s->temp[2], width2, leftv);
                    }
                    if (s->predictor == PLANE) {
                        if (cy > s->interlaced) {
                            s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
                            if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                                s->llviddsp.add_bytes(udst, udst - fake_ustride, width2);
                                s->llviddsp.add_bytes(vdst, vdst - fake_vstride, width2);
                            }
                        }
                    }
                }
                draw_slice(s, p, height);

                break;
            case MEDIAN:
                /* first line except first 2 pixels is left predicted */
                decode_422_bitstream(s, width - 2);
                lefty = s->llviddsp.add_left_pred(p->data[0] + 2, s->temp[0],
                                                  width - 2, lefty);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    leftu = s->llviddsp.add_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
                    leftv = s->llviddsp.add_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
                }

                cy = y = 1;

                /* second line is left predicted for interlaced case */
                if (s->interlaced) {
                    decode_422_bitstream(s, width);
                    lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0],
                                                      s->temp[0], width, lefty);
                    if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                        leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[2], s->temp[1], width2, leftu);
                        leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[1], s->temp[2], width2, leftv);
                    }
                    y++;
                    cy++;
                }

                /* next 4 pixels are left predicted too */
                decode_422_bitstream(s, 4);
                lefty = s->llviddsp.add_left_pred(p->data[0] + fake_ystride,
                                                  s->temp[0], 4, lefty);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    leftu = s->llviddsp.add_left_pred(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
                    leftv = s->llviddsp.add_left_pred(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
                }

                /* next line except the first 4 pixels is median predicted */
                lefttopy = p->data[0][3];
                decode_422_bitstream(s, width - 4);
                s->llviddsp.add_median_pred(p->data[0] + fake_ystride + 4,
                                            p->data[0] + 4, s->temp[0],
                                            width - 4, &lefty, &lefttopy);
                if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                    lefttopu = p->data[1][1];
                    lefttopv = p->data[2][1];
                    s->llviddsp.add_median_pred(p->data[1] + fake_ustride + 2, p->data[1] + 2, s->temp[1], width2 - 2, &leftu, &lefttopu);
                    s->llviddsp.add_median_pred(p->data[2] + fake_vstride + 2, p->data[2] + 2, s->temp[2], width2 - 2, &leftv, &lefttopv);
                }
                y++;
                cy++;

                for (; y < height; y++, cy++) {
                    uint8_t *ydst, *udst, *vdst;

                    if (s->bitstream_bpp == 12) {
                        while (2 * cy > y) {
                            decode_gray_bitstream(s, width);
                            ydst = p->data[0] + p->linesize[0] * y;
                            s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
                                                        s->temp[0], width,
                                                        &lefty, &lefttopy);
                            y++;
                        }
                        if (y >= height)
                            break;
                    }
                    draw_slice(s, p, y);

                    decode_422_bitstream(s, width);

                    ydst = p->data[0] + p->linesize[0] * y;
                    udst = p->data[1] + p->linesize[1] * cy;
                    vdst = p->data[2] + p->linesize[2] * cy;

                    s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
                                                s->temp[0], width,
                                                &lefty, &lefttopy);
                    if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
                        s->llviddsp.add_median_pred(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
                        s->llviddsp.add_median_pred(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
                    }
                }

                draw_slice(s, p, height);
                break;
            }
        }
    } else {
        int y;
        uint8_t left[4];
        const int last_line = (height - 1) * p->linesize[0];

        if (s->bitstream_bpp == 32) {
            left[A] = p->data[0][last_line + A] = get_bits(&s->gb, 8);
            left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
            left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
            left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
        } else {
            left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
            left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
            left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
            left[A] = p->data[0][last_line + A] = 255;
            skip_bits(&s->gb, 8);
        }

        if (s->bgr32) {
            switch (s->predictor) {
            case LEFT:
            case PLANE:
                decode_bgr_bitstream(s, width - 1);
                s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + last_line + 4,
                                                 s->temp[0], width - 1, left);

                for (y = s->height - 2; y >= 0; y--) { // Yes it is stored upside down.
                    decode_bgr_bitstream(s, width);
                    s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + p->linesize[0] * y,
                                                     s->temp[0], width, left);
                    if (s->predictor == PLANE) {
                        if (s->bitstream_bpp != 32)
                            left[A] = 0;
                        if (y < s->height - 1 - s->interlaced) {
                            s->llviddsp.add_bytes(p->data[0] + p->linesize[0] * y,
                                                  p->data[0] + p->linesize[0] * y +
                                                  fake_ystride, 4 * width);
                        }
                    }
                }
                // just 1 large slice as this is not possible in reverse order
                draw_slice(s, p, height);
                break;
            default:
                av_log(avctx, AV_LOG_ERROR,
                       "prediction type not supported!\n");
            }
        } else {
            av_log(avctx, AV_LOG_ERROR,
                   "BGR24 output is not implemented yet\n");
            return AVERROR_PATCHWELCOME;
        }
    }
    emms_c();

    *got_frame = 1;

    return (get_bits_count(&s->gb) + 31) / 32 * 4 + table_size;
}

AVCodec ff_huffyuv_decoder = {
    .name             = "huffyuv",
    .long_name        = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_HUFFYUV,
    .priv_data_size   = sizeof(HYuvContext),
    .init             = decode_init,
    .close            = decode_end,
    .decode           = decode_frame,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
                        AV_CODEC_CAP_FRAME_THREADS,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
};

#if CONFIG_FFVHUFF_DECODER
AVCodec ff_ffvhuff_decoder = {
    .name             = "ffvhuff",
    .long_name        = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_FFVHUFF,
    .priv_data_size   = sizeof(HYuvContext),
    .init             = decode_init,
    .close            = decode_end,
    .decode           = decode_frame,
    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
                        AV_CODEC_CAP_FRAME_THREADS,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
};
#endif /* CONFIG_FFVHUFF_DECODER */