You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1226 lines
45KB

  1. /*
  2. * huffyuv decoder
  3. *
  4. * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
  5. *
  6. * see http://www.pcisys.net/~melanson/codecs/huffyuv.txt for a description of
  7. * the algorithm used
  8. *
  9. * This file is part of FFmpeg.
  10. *
  11. * FFmpeg is free software; you can redistribute it and/or
  12. * modify it under the terms of the GNU Lesser General Public
  13. * License as published by the Free Software Foundation; either
  14. * version 2.1 of the License, or (at your option) any later version.
  15. *
  16. * FFmpeg is distributed in the hope that it will be useful,
  17. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  19. * Lesser General Public License for more details.
  20. *
  21. * You should have received a copy of the GNU Lesser General Public
  22. * License along with FFmpeg; if not, write to the Free Software
  23. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  24. *
  25. * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
  26. */
  27. /**
  28. * @file
  29. * huffyuv decoder
  30. */
  31. #define UNCHECKED_BITSTREAM_READER 1
  32. #include "avcodec.h"
  33. #include "get_bits.h"
  34. #include "huffyuv.h"
  35. #include "huffyuvdsp.h"
  36. #include "thread.h"
  37. #include "libavutil/pixdesc.h"
#define classic_shift_luma_table_size 42
/* Run-length-coded code-length table for the built-in ("classic", version-0,
 * no-extradata) huffyuv luma Huffman tree; consumed by read_len_table().
 * Trailing zeros are bit-reader overread padding. */
static const unsigned char classic_shift_luma[classic_shift_luma_table_size + FF_INPUT_BUFFER_PADDING_SIZE] = {
    34, 36, 35, 69, 135, 232, 9, 16, 10, 24, 11, 23, 12, 16, 13, 10,
    14, 8, 15, 8, 16, 8, 17, 20, 16, 10, 207, 206, 205, 236, 11, 8,
    10, 21, 9, 23, 8, 8, 199, 70, 69, 68, 0,
    0,0,0,0,0,0,0,0,
};

#define classic_shift_chroma_table_size 59
/* Same as classic_shift_luma, but for the built-in chroma Huffman tree. */
static const unsigned char classic_shift_chroma[classic_shift_chroma_table_size + FF_INPUT_BUFFER_PADDING_SIZE] = {
    66, 36, 37, 38, 39, 40, 41, 75, 76, 77, 110, 239, 144, 81, 82, 83,
    84, 85, 118, 183, 56, 57, 88, 89, 56, 89, 154, 57, 58, 57, 26, 141,
    57, 56, 58, 57, 58, 57, 184, 119, 214, 245, 116, 83, 82, 49, 80, 79,
    78, 77, 44, 75, 41, 40, 39, 38, 37, 36, 34, 0,
    0,0,0,0,0,0,0,0,
};

/* Code values (copied into s->bits[0]) for the built-in luma tree,
 * indexed by symbol. */
static const unsigned char classic_add_luma[256] = {
    3, 9, 5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
    73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
    68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
    35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
    37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
    35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
    27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
    15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
    12, 17, 19, 13, 4, 9, 2, 11, 1, 7, 8, 0, 16, 3, 14, 6,
    12, 10, 5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
    18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
    28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
    28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
    62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
    54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
    46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13, 7, 8,
};

/* Code values (copied into s->bits[1]) for the built-in chroma tree,
 * indexed by symbol. */
static const unsigned char classic_add_chroma[256] = {
    3, 1, 2, 2, 2, 2, 3, 3, 7, 5, 7, 5, 8, 6, 11, 9,
    7, 13, 11, 10, 9, 8, 7, 5, 9, 7, 6, 4, 7, 5, 8, 7,
    11, 8, 13, 11, 19, 15, 22, 23, 20, 33, 32, 28, 27, 29, 51, 77,
    43, 45, 76, 81, 46, 82, 75, 55, 56, 144, 58, 80, 60, 74, 147, 63,
    143, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
    80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 27, 30, 21, 22,
    17, 14, 5, 6, 100, 54, 47, 50, 51, 53, 106, 107, 108, 109, 110, 111,
    112, 113, 114, 115, 4, 117, 118, 92, 94, 121, 122, 3, 124, 103, 2, 1,
    0, 129, 130, 131, 120, 119, 126, 125, 136, 137, 138, 139, 140, 141, 142, 134,
    135, 132, 133, 104, 64, 101, 62, 57, 102, 95, 93, 59, 61, 28, 97, 96,
    52, 49, 48, 29, 32, 25, 24, 46, 23, 98, 45, 44, 43, 20, 42, 41,
    19, 18, 99, 40, 15, 39, 38, 16, 13, 12, 11, 37, 10, 9, 8, 36,
    7, 128, 127, 105, 123, 116, 35, 34, 33, 145, 31, 79, 42, 146, 78, 26,
    83, 48, 49, 50, 44, 47, 26, 31, 30, 18, 17, 19, 21, 24, 25, 13,
    14, 16, 17, 18, 20, 21, 12, 14, 15, 9, 10, 6, 9, 6, 5, 8,
    6, 12, 8, 10, 7, 9, 6, 4, 6, 2, 2, 3, 3, 3, 3, 2,
};
  89. static int read_len_table(uint8_t *dst, GetBitContext *gb, int n)
  90. {
  91. int i, val, repeat;
  92. for (i = 0; i < n;) {
  93. repeat = get_bits(gb, 3);
  94. val = get_bits(gb, 5);
  95. if (repeat == 0)
  96. repeat = get_bits(gb, 8);
  97. if (i + repeat > n || get_bits_left(gb) < 0) {
  98. av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
  99. return -1;
  100. }
  101. while (repeat--)
  102. dst[i++] = val;
  103. }
  104. return 0;
  105. }
/* Build the joint (two-symbols-per-lookup) VLC tables in s->vlc[4..7]:
 * decoding two samples per table lookup roughly halves the number of
 * bitstream operations in the hot decode loops. */
static int generate_joint_tables(HYuvContext *s)
{
    uint16_t symbols[1 << VLC_BITS];
    uint16_t bits[1 << VLC_BITS];
    uint8_t len[1 << VLC_BITS];
    int ret;

    if (s->bitstream_bpp < 24 || s->version > 2) {
        /* YUV (and v3 RGB) path: pair every first-symbol code with every
         * second-symbol code that still fits in VLC_BITS total. */
        int p, i, y, u;
        for (p = 0; p < 4; p++) {
            /* v2 and older use the luma table (plane 0) for the first symbol */
            int p0 = s->version > 2 ? p : 0;
            for (i = y = 0; y < s->vlc_n; y++) {
                int len0 = s->len[p0][y];
                int limit = VLC_BITS - len0;
                if (limit <= 0 || !len0)
                    continue;
                /* skip symbols outside the valid (sign-extended) range */
                if ((sign_extend(y, 8) & (s->vlc_n - 1)) != y)
                    continue;
                for (u = 0; u < s->vlc_n; u++) {
                    int len1 = s->len[p][u];
                    if (len1 > limit || !len1)
                        continue;
                    if ((sign_extend(u, 8) & (s->vlc_n - 1)) != u)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]     = len0 + len1;
                    bits[i]    = (s->bits[p0][y] << len1) + s->bits[p][u];
                    symbols[i] = (y << 8) + (u & 0xFF);  /* pack both samples */
                    i++;
                }
            }
            ff_free_vlc(&s->vlc[4 + p]);
            if ((ret = ff_init_vlc_sparse(&s->vlc[4 + p], VLC_BITS, i, len, 1, 1,
                                          bits, 2, 2, symbols, 2, 2, 0)) < 0)
                return ret;
        }
    } else {
        /* RGB path: one triple-symbol table; map[] translates the joint code
         * index back to the (B,G,R) sample values. */
        uint8_t (*map)[4] = (uint8_t(*)[4]) s->pix_bgr_map;
        int i, b, g, r, code;
        int p0 = s->decorrelate;
        int p1 = !s->decorrelate;
        /* Restrict the range to +/-16 because that's pretty much guaranteed
         * to cover all the combinations that fit in 11 bits total, and it
         * does not matter if we miss a few rare codes. */
        for (i = 0, g = -16; g < 16; g++) {
            int len0 = s->len[p0][g & 255];
            int limit0 = VLC_BITS - len0;
            if (limit0 < 2 || !len0)
                continue;
            for (b = -16; b < 16; b++) {
                int len1 = s->len[p1][b & 255];
                int limit1 = limit0 - len1;
                if (limit1 < 1 || !len1)
                    continue;
                code = (s->bits[p0][g & 255] << len1) + s->bits[p1][b & 255];
                for (r = -16; r < 16; r++) {
                    int len2 = s->len[2][r & 255];
                    if (len2 > limit1 || !len2)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]  = len0 + len1 + len2;
                    bits[i] = (code << len2) + s->bits[2][r & 255];
                    if (s->decorrelate) {
                        /* decorrelated streams code B and R as deltas vs G */
                        map[i][G] = g;
                        map[i][B] = g + b;
                        map[i][R] = g + r;
                    } else {
                        map[i][B] = g;
                        map[i][G] = b;
                        map[i][R] = r;
                    }
                    i++;
                }
            }
        }
        ff_free_vlc(&s->vlc[4]);
        if ((ret = init_vlc(&s->vlc[4], VLC_BITS, i, len, 1, 1, bits, 2, 2, 0)) < 0)
            return ret;
    }
    return 0;
}
  186. static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length)
  187. {
  188. GetBitContext gb;
  189. int i;
  190. int ret;
  191. int count = 3;
  192. init_get_bits(&gb, src, length * 8);
  193. if (s->version > 2)
  194. count = 1 + s->alpha + 2*s->chroma;
  195. for (i = 0; i < count; i++) {
  196. if (read_len_table(s->len[i], &gb, s->vlc_n) < 0)
  197. return -1;
  198. if (ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n) < 0)
  199. return -1;
  200. ff_free_vlc(&s->vlc[i]);
  201. if ((ret = init_vlc(&s->vlc[i], VLC_BITS, s->vlc_n, s->len[i], 1, 1,
  202. s->bits[i], 4, 4, 0)) < 0)
  203. return ret;
  204. }
  205. if ((ret = generate_joint_tables(s)) < 0)
  206. return ret;
  207. return (get_bits_count(&gb) + 7) / 8;
  208. }
  209. static int read_old_huffman_tables(HYuvContext *s)
  210. {
  211. GetBitContext gb;
  212. int i;
  213. int ret;
  214. init_get_bits(&gb, classic_shift_luma,
  215. classic_shift_luma_table_size * 8);
  216. if (read_len_table(s->len[0], &gb, 256) < 0)
  217. return -1;
  218. init_get_bits(&gb, classic_shift_chroma,
  219. classic_shift_chroma_table_size * 8);
  220. if (read_len_table(s->len[1], &gb, 256) < 0)
  221. return -1;
  222. for (i = 0; i < 256; i++)
  223. s->bits[0][i] = classic_add_luma[i];
  224. for (i = 0; i < 256; i++)
  225. s->bits[1][i] = classic_add_chroma[i];
  226. if (s->bitstream_bpp >= 24) {
  227. memcpy(s->bits[1], s->bits[0], 256 * sizeof(uint32_t));
  228. memcpy(s->len[1], s->len[0], 256 * sizeof(uint8_t));
  229. }
  230. memcpy(s->bits[2], s->bits[1], 256 * sizeof(uint32_t));
  231. memcpy(s->len[2], s->len[1], 256 * sizeof(uint8_t));
  232. for (i = 0; i < 4; i++) {
  233. ff_free_vlc(&s->vlc[i]);
  234. if ((ret = init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
  235. s->bits[i], 4, 4, 0)) < 0)
  236. return ret;
  237. }
  238. if ((ret = generate_joint_tables(s)) < 0)
  239. return ret;
  240. return 0;
  241. }
  242. static av_cold int decode_init(AVCodecContext *avctx)
  243. {
  244. HYuvContext *s = avctx->priv_data;
  245. ff_huffyuvdsp_init(&s->hdsp);
  246. memset(s->vlc, 0, 4 * sizeof(VLC));
  247. s->interlaced = avctx->height > 288;
  248. s->bgr32 = 1;
  249. if (avctx->extradata_size) {
  250. if ((avctx->bits_per_coded_sample & 7) &&
  251. avctx->bits_per_coded_sample != 12)
  252. s->version = 1; // do such files exist at all?
  253. else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
  254. s->version = 2;
  255. else
  256. s->version = 3;
  257. } else
  258. s->version = 0;
  259. s->bps = 8;
  260. s->n = 1<<s->bps;
  261. s->vlc_n = FFMIN(s->n, MAX_VLC_N);
  262. s->chroma = 1;
  263. if (s->version >= 2) {
  264. int method, interlace;
  265. if (avctx->extradata_size < 4)
  266. return -1;
  267. method = ((uint8_t *) avctx->extradata)[0];
  268. s->decorrelate = method & 64 ? 1 : 0;
  269. s->predictor = method & 63;
  270. if (s->version == 2) {
  271. s->bitstream_bpp = ((uint8_t *) avctx->extradata)[1];
  272. if (s->bitstream_bpp == 0)
  273. s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
  274. } else {
  275. s->bps = (avctx->extradata[1] >> 4) + 1;
  276. s->n = 1<<s->bps;
  277. s->vlc_n = FFMIN(s->n, MAX_VLC_N);
  278. s->chroma_h_shift = avctx->extradata[1] & 3;
  279. s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
  280. s->yuv = !!(((uint8_t *) avctx->extradata)[2] & 1);
  281. s->chroma= !!(((uint8_t *) avctx->extradata)[2] & 3);
  282. s->alpha = !!(((uint8_t *) avctx->extradata)[2] & 4);
  283. }
  284. interlace = (((uint8_t *) avctx->extradata)[2] & 0x30) >> 4;
  285. s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
  286. s->context = ((uint8_t *) avctx->extradata)[2] & 0x40 ? 1 : 0;
  287. if (read_huffman_tables(s, ((uint8_t *) avctx->extradata) + 4,
  288. avctx->extradata_size - 4) < 0)
  289. return AVERROR_INVALIDDATA;
  290. } else {
  291. switch (avctx->bits_per_coded_sample & 7) {
  292. case 1:
  293. s->predictor = LEFT;
  294. s->decorrelate = 0;
  295. break;
  296. case 2:
  297. s->predictor = LEFT;
  298. s->decorrelate = 1;
  299. break;
  300. case 3:
  301. s->predictor = PLANE;
  302. s->decorrelate = avctx->bits_per_coded_sample >= 24;
  303. break;
  304. case 4:
  305. s->predictor = MEDIAN;
  306. s->decorrelate = 0;
  307. break;
  308. default:
  309. s->predictor = LEFT; // OLD
  310. s->decorrelate = 0;
  311. break;
  312. }
  313. s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
  314. s->context = 0;
  315. if (read_old_huffman_tables(s) < 0)
  316. return AVERROR_INVALIDDATA;
  317. }
  318. if (s->version <= 2) {
  319. switch (s->bitstream_bpp) {
  320. case 12:
  321. avctx->pix_fmt = AV_PIX_FMT_YUV420P;
  322. s->yuv = 1;
  323. break;
  324. case 16:
  325. if (s->yuy2)
  326. avctx->pix_fmt = AV_PIX_FMT_YUYV422;
  327. else
  328. avctx->pix_fmt = AV_PIX_FMT_YUV422P;
  329. s->yuv = 1;
  330. break;
  331. case 24:
  332. if (s->bgr32)
  333. avctx->pix_fmt = AV_PIX_FMT_0RGB32;
  334. else
  335. avctx->pix_fmt = AV_PIX_FMT_BGR24;
  336. break;
  337. case 32:
  338. av_assert0(s->bgr32);
  339. avctx->pix_fmt = AV_PIX_FMT_RGB32;
  340. s->alpha = 1;
  341. break;
  342. default:
  343. return AVERROR_INVALIDDATA;
  344. }
  345. av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
  346. &s->chroma_h_shift,
  347. &s->chroma_v_shift);
  348. } else {
  349. switch ( (s->chroma<<10) | (s->yuv<<9) | (s->alpha<<8) | ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2)) {
  350. case 0x070:
  351. avctx->pix_fmt = AV_PIX_FMT_GRAY8;
  352. break;
  353. case 0x0F0:
  354. avctx->pix_fmt = AV_PIX_FMT_GRAY16;
  355. break;
  356. case 0x170:
  357. avctx->pix_fmt = AV_PIX_FMT_GRAY8A;
  358. break;
  359. case 0x470:
  360. avctx->pix_fmt = AV_PIX_FMT_GBRP;
  361. break;
  362. case 0x480:
  363. avctx->pix_fmt = AV_PIX_FMT_GBRP9;
  364. break;
  365. case 0x490:
  366. avctx->pix_fmt = AV_PIX_FMT_GBRP10;
  367. break;
  368. case 0x4B0:
  369. avctx->pix_fmt = AV_PIX_FMT_GBRP12;
  370. break;
  371. case 0x4D0:
  372. avctx->pix_fmt = AV_PIX_FMT_GBRP14;
  373. break;
  374. case 0x4F0:
  375. avctx->pix_fmt = AV_PIX_FMT_GBRP16;
  376. break;
  377. case 0x570:
  378. avctx->pix_fmt = AV_PIX_FMT_GBRAP;
  379. break;
  380. case 0x670:
  381. avctx->pix_fmt = AV_PIX_FMT_YUV444P;
  382. break;
  383. case 0x680:
  384. avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
  385. break;
  386. case 0x690:
  387. avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
  388. break;
  389. case 0x6B0:
  390. avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
  391. break;
  392. case 0x6D0:
  393. avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
  394. break;
  395. case 0x6F0:
  396. avctx->pix_fmt = AV_PIX_FMT_YUV444P16;
  397. break;
  398. case 0x671:
  399. avctx->pix_fmt = AV_PIX_FMT_YUV422P;
  400. break;
  401. case 0x681:
  402. avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
  403. break;
  404. case 0x691:
  405. avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
  406. break;
  407. case 0x6B1:
  408. avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
  409. break;
  410. case 0x6D1:
  411. avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
  412. break;
  413. case 0x6F1:
  414. avctx->pix_fmt = AV_PIX_FMT_YUV422P16;
  415. break;
  416. case 0x672:
  417. avctx->pix_fmt = AV_PIX_FMT_YUV411P;
  418. break;
  419. case 0x674:
  420. avctx->pix_fmt = AV_PIX_FMT_YUV440P;
  421. break;
  422. case 0x675:
  423. avctx->pix_fmt = AV_PIX_FMT_YUV420P;
  424. break;
  425. case 0x685:
  426. avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
  427. break;
  428. case 0x695:
  429. avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
  430. break;
  431. case 0x6B5:
  432. avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
  433. break;
  434. case 0x6D5:
  435. avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
  436. break;
  437. case 0x6F5:
  438. avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
  439. break;
  440. case 0x67A:
  441. avctx->pix_fmt = AV_PIX_FMT_YUV410P;
  442. break;
  443. case 0x770:
  444. avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
  445. break;
  446. case 0x780:
  447. avctx->pix_fmt = AV_PIX_FMT_YUVA444P9;
  448. break;
  449. case 0x790:
  450. avctx->pix_fmt = AV_PIX_FMT_YUVA444P10;
  451. break;
  452. case 0x7F0:
  453. avctx->pix_fmt = AV_PIX_FMT_YUVA444P16;
  454. break;
  455. case 0x771:
  456. avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
  457. break;
  458. case 0x781:
  459. avctx->pix_fmt = AV_PIX_FMT_YUVA422P9;
  460. break;
  461. case 0x791:
  462. avctx->pix_fmt = AV_PIX_FMT_YUVA422P10;
  463. break;
  464. case 0x7F1:
  465. avctx->pix_fmt = AV_PIX_FMT_YUVA422P16;
  466. break;
  467. case 0x775:
  468. avctx->pix_fmt = AV_PIX_FMT_YUVA420P;
  469. break;
  470. case 0x785:
  471. avctx->pix_fmt = AV_PIX_FMT_YUVA420P9;
  472. break;
  473. case 0x795:
  474. avctx->pix_fmt = AV_PIX_FMT_YUVA420P10;
  475. break;
  476. case 0x7F5:
  477. avctx->pix_fmt = AV_PIX_FMT_YUVA420P16;
  478. break;
  479. default:
  480. return AVERROR_INVALIDDATA;
  481. }
  482. }
  483. ff_huffyuv_common_init(avctx);
  484. if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
  485. av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
  486. return AVERROR_INVALIDDATA;
  487. }
  488. if (s->predictor == MEDIAN && avctx->pix_fmt == AV_PIX_FMT_YUV422P && avctx->width%4) {
  489. av_log(avctx, AV_LOG_ERROR, "width must be a multiple of 4 this colorspace and predictor\n");
  490. return AVERROR_INVALIDDATA;
  491. }
  492. if (ff_huffyuv_alloc_temp(s)) {
  493. ff_huffyuv_common_end(s);
  494. return AVERROR(ENOMEM);
  495. }
  496. return 0;
  497. }
  498. static av_cold int decode_init_thread_copy(AVCodecContext *avctx)
  499. {
  500. HYuvContext *s = avctx->priv_data;
  501. int i;
  502. if (ff_huffyuv_alloc_temp(s)) {
  503. ff_huffyuv_common_end(s);
  504. return AVERROR(ENOMEM);
  505. }
  506. for (i = 0; i < 8; i++)
  507. s->vlc[i].table = NULL;
  508. if (s->version >= 2) {
  509. if (read_huffman_tables(s, ((uint8_t *) avctx->extradata) + 4,
  510. avctx->extradata_size) < 0)
  511. return AVERROR_INVALIDDATA;
  512. } else {
  513. if (read_old_huffman_tables(s) < 0)
  514. return AVERROR_INVALIDDATA;
  515. }
  516. return 0;
  517. }
/** Subset of GET_VLC for use in hand-rolled VLC reading code.
 * Resolves up to max_depth levels of multi-level VLC tables and stores the
 * decoded symbol in dst. Expects the caller to have declared `index`,
 * `code`, `n` and `nb_bits`, loaded `index` from the cache, and to keep the
 * bitstream cache (OPEN_READER/UPDATE_CACHE) current. */
#define VLC_INTERN(dst, table, gb, name, bits, max_depth)           \
    code = table[index][0];                                         \
    n    = table[index][1];                                         \
    if (max_depth > 1 && n < 0) {                                   \
        /* negative n: descend into a second-level table */         \
        LAST_SKIP_BITS(name, gb, bits);                             \
        UPDATE_CACHE(name, gb);                                     \
                                                                    \
        nb_bits = -n;                                               \
        index   = SHOW_UBITS(name, gb, nb_bits) + code;             \
        code    = table[index][0];                                  \
        n       = table[index][1];                                  \
        if (max_depth > 2 && n < 0) {                               \
            LAST_SKIP_BITS(name, gb, nb_bits);                      \
            UPDATE_CACHE(name, gb);                                 \
                                                                    \
            nb_bits = -n;                                           \
            index   = SHOW_UBITS(name, gb, nb_bits) + code;         \
            code    = table[index][0];                              \
            n       = table[index][1];                              \
        }                                                           \
    }                                                               \
    dst = code;                                                     \
    LAST_SKIP_BITS(name, gb, n)
/* Decode two symbols in one step: first probe the joint table (dtable);
 * on a hit (n > 0) one lookup yields both symbols, unpacked via OP().
 * Otherwise fall back to two single-symbol reads from table1 and table2. */
#define GET_VLC_DUAL(dst0, dst1, name, gb, dtable, table1, table2,  \
                     bits, max_depth, OP)                           \
    do {                                                            \
        unsigned int index = SHOW_UBITS(name, gb, bits);            \
        int code, n = dtable[index][1];                             \
                                                                    \
        if (n<=0) {                                                 \
            int nb_bits;                                            \
            VLC_INTERN(dst0, table1, gb, name, bits, max_depth);    \
                                                                    \
            UPDATE_CACHE(re, gb);                                   \
            index = SHOW_UBITS(name, gb, bits);                     \
            VLC_INTERN(dst1, table2, gb, name, bits, max_depth);    \
        } else {                                                    \
            code = dtable[index][0];                                \
            OP(dst0, dst1, code);                                   \
            LAST_SKIP_BITS(name, gb, n);                            \
        }                                                           \
    } while (0)
/* Unpack a joint 16-bit code into two 8-bit samples. */
#define OP8bits(dst0, dst1, code) dst0 = code>>8; dst1 = code

/* Read one luma sample (plane 0) and one plane1 sample, using the
 * luma+plane1 joint table when possible. */
#define READ_2PIX(dst0, dst1, plane1) \
    UPDATE_CACHE(re, &s->gb); \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4+plane1].table, \
                 s->vlc[0].table, s->vlc[plane1].table, VLC_BITS, 3, OP8bits)
/* Decode `count` 4:2:2 samples (Y pairs interleaved with U and V) into
 * s->temp[0..2]. Near the end of the buffer, switch to a bounds-checked
 * loop and zero-fill whatever could not be decoded. */
static void decode_422_bitstream(HYuvContext *s, int count)
{
    int i, icount;
    OPEN_READER(re, &s->gb);
    count /= 2;

    /* number of pair-reads guaranteed safe without per-read checks;
     * 32 bits/read is a worst-case estimate for 4 samples */
    icount = get_bits_left(&s->gb) / (32 * 4);
    if (count >= icount) {
        /* fast unchecked part */
        for (i = 0; i < icount; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        /* careful part: re-check remaining bits before each read */
        for (; i < count && get_bits_left(&s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i    ], s->temp[1][i], 1);
            if (get_bits_left(&s->gb) <= 0) break;
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        /* zero-fill samples the truncated bitstream did not provide */
        for (; i < count; i++)
            s->temp[0][2 * i    ] = s->temp[1][i] =
            s->temp[0][2 * i + 1] = s->temp[2][i] = 0;
    } else {
        /* plenty of input left: no per-read checks needed */
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
    }
    CLOSE_READER(re, &s->gb);
}
/* Read two consecutive samples of one plane via its joint table. */
#define READ_2PIX_PLANE(dst0, dst1, plane, OP) \
    UPDATE_CACHE(re, &s->gb); \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4+plane].table, \
                 s->vlc[plane].table, s->vlc[plane].table, VLC_BITS, 3, OP)

/* Unpack a joint code into two samples; the second is sign-extended
 * (9..14 bps deltas are signed). */
#define OP14bits(dst0, dst1, code) dst0 = code>>8; dst1 = sign_extend(code, 8)

/* TODO instead of restarting the read when the code isn't in the first level
 * of the joint table, jump into the 2nd level of the individual table. */
/* >14 bps: the VLC codes the high bits of each sample and two raw LSBs
 * follow each code. */
#define READ_2PIX_PLANE16(dst0, dst1, plane){\
    dst0 = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)<<2;\
    dst0 += get_bits(&s->gb, 2);\
    dst1 = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3)<<2;\
    dst1 += get_bits(&s->gb, 2);\
}
/* Decode `count` samples of a single plane (v3 planar path) into
 * s->temp[0] (8-bit) or s->temp16[0] (>8-bit). Each depth range uses the
 * matching read macro; near end-of-buffer a bounds-checked loop is used. */
static void decode_plane_bitstream(HYuvContext *s, int count, int plane)
{
    int i;

    count /= 2;  /* samples are decoded in pairs */

    if (s->bps <= 8) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            /* possibly truncated input: check before each pair */
            for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        }
        CLOSE_READER(re, &s->gb);
    } else if (s->bps <= 14) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        }
        CLOSE_READER(re, &s->gb);
    } else {
        /* >14 bps path uses get_vlc2/get_bits, no open reader needed */
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        }
    }
}
/* Decode `count` luma-only samples into s->temp[0] (gray / 4:2:0 extra
 * luma lines). Uses a bounds-checked loop near the end of the buffer. */
static void decode_gray_bitstream(HYuvContext *s, int count)
{
    int i;
    OPEN_READER(re, &s->gb);
    count /= 2;

    if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
        /* possibly truncated input: check before each pair */
        for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    } else {
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    }
    CLOSE_READER(re, &s->gb);
}
/* Decode `count` BGR(A) pixels into s->temp[0] (4 bytes per pixel).
 * decorrelate/alpha are compile-time constants: decode_bgr_bitstream()
 * calls this with literal arguments so the always-inline expansion yields
 * four specialized loops. */
static av_always_inline void decode_bgr_1(HYuvContext *s, int count,
                                          int decorrelate, int alpha)
{
    int i;
    OPEN_READER(re, &s->gb);

    for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
        unsigned int index;
        int code, n;

        UPDATE_CACHE(re, &s->gb);
        index = SHOW_UBITS(re, &s->gb, VLC_BITS);
        n = s->vlc[4].table[index][1];

        if (n>0) {
            /* joint-table hit: one lookup produces the whole pixel */
            code = s->vlc[4].table[index][0];
            *(uint32_t *) &s->temp[0][4 * i] = s->pix_bgr_map[code];
            LAST_SKIP_BITS(re, &s->gb, n);
        } else {
            /* fall back to decoding B, G, R (and A) individually */
            int nb_bits;
            if (decorrelate) {
                /* G first; B and R are stored as deltas vs G */
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[0].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + B] = code + s->temp[0][4 * i + G];

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[2].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + R] = code + s->temp[0][4 * i + G];
            } else {
                VLC_INTERN(s->temp[0][4 * i + B], s->vlc[0].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + R], s->vlc[2].table,
                           &s->gb, re, VLC_BITS, 3);
            }
            if (alpha) {
                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + A], s->vlc[2].table,
                           &s->gb, re, VLC_BITS, 3);
            } else
                s->temp[0][4 * i + A] = 0;
        }
    }
    CLOSE_READER(re, &s->gb);
}
  713. static void decode_bgr_bitstream(HYuvContext *s, int count)
  714. {
  715. if (s->decorrelate) {
  716. if (s->bitstream_bpp == 24)
  717. decode_bgr_1(s, count, 1, 0);
  718. else
  719. decode_bgr_1(s, count, 1, 1);
  720. } else {
  721. if (s->bitstream_bpp == 24)
  722. decode_bgr_1(s, count, 0, 0);
  723. else
  724. decode_bgr_1(s, count, 0, 1);
  725. }
  726. }
  727. static void draw_slice(HYuvContext *s, AVFrame *frame, int y)
  728. {
  729. int h, cy, i;
  730. int offset[AV_NUM_DATA_POINTERS];
  731. if (s->avctx->draw_horiz_band == NULL)
  732. return;
  733. h = y - s->last_slice_end;
  734. y -= h;
  735. if (s->bitstream_bpp == 12)
  736. cy = y >> 1;
  737. else
  738. cy = y;
  739. offset[0] = frame->linesize[0] * y;
  740. offset[1] = frame->linesize[1] * cy;
  741. offset[2] = frame->linesize[2] * cy;
  742. for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
  743. offset[i] = 0;
  744. emms_c();
  745. s->avctx->draw_horiz_band(s->avctx, frame, offset, y, 3, h);
  746. s->last_slice_end = y + h;
  747. }
  748. static int left_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
  749. {
  750. if (s->bps <= 8) {
  751. return s->hdsp.add_hfyu_left_pred(dst, src, w, acc);
  752. } else {
  753. return s->llviddsp.add_hfyu_left_pred_int16(( uint16_t *)dst, (const uint16_t *)src, s->n-1, w, acc);
  754. }
  755. }
  756. static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
  757. {
  758. if (s->bps <= 8) {
  759. s->hdsp.add_bytes(dst, src, w);
  760. } else {
  761. s->llviddsp.add_int16((uint16_t*)dst, (const uint16_t*)src, s->n - 1, w);
  762. }
  763. }
  764. static void add_median_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
  765. {
  766. if (s->bps <= 8) {
  767. s->hdsp.add_hfyu_median_pred(dst, src, diff, w, left, left_top);
  768. } else {
  769. s->llviddsp.add_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src, (const uint16_t *)diff, s->n-1, w, left, left_top);
  770. }
  771. }
  772. static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
  773. AVPacket *avpkt)
  774. {
  775. const uint8_t *buf = avpkt->data;
  776. int buf_size = avpkt->size;
  777. HYuvContext *s = avctx->priv_data;
  778. const int width = s->width;
  779. const int width2 = s->width >> 1;
  780. const int height = s->height;
  781. int fake_ystride, fake_ustride, fake_vstride;
  782. ThreadFrame frame = { .f = data };
  783. AVFrame *const p = data;
  784. int table_size = 0, ret;
  785. av_fast_padded_malloc(&s->bitstream_buffer,
  786. &s->bitstream_buffer_size,
  787. buf_size);
  788. if (!s->bitstream_buffer)
  789. return AVERROR(ENOMEM);
  790. s->bdsp.bswap_buf((uint32_t *) s->bitstream_buffer,
  791. (const uint32_t *) buf, buf_size / 4);
  792. if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
  793. return ret;
  794. if (s->context) {
  795. table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
  796. if (table_size < 0)
  797. return AVERROR_INVALIDDATA;
  798. }
  799. if ((unsigned) (buf_size - table_size) >= INT_MAX / 8)
  800. return AVERROR_INVALIDDATA;
  801. init_get_bits(&s->gb, s->bitstream_buffer + table_size,
  802. (buf_size - table_size) * 8);
  803. fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
  804. fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
  805. fake_vstride = s->interlaced ? p->linesize[2] * 2 : p->linesize[2];
  806. s->last_slice_end = 0;
  807. if (s->version > 2) {
  808. int plane;
  809. for(plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
  810. int left, lefttop, y;
  811. int w = width;
  812. int h = height;
  813. int fake_stride = fake_ystride;
  814. if (s->chroma && (plane == 1 || plane == 2)) {
  815. w >>= s->chroma_h_shift;
  816. h >>= s->chroma_v_shift;
  817. fake_stride = plane == 1 ? fake_ustride : fake_vstride;
  818. }
  819. switch (s->predictor) {
  820. case LEFT:
  821. case PLANE:
  822. decode_plane_bitstream(s, w, plane);
  823. left = left_prediction(s, p->data[plane], s->temp[0], w, 0);
  824. for (y = 1; y < h; y++) {
  825. uint8_t *dst = p->data[plane] + p->linesize[plane]*y;
  826. decode_plane_bitstream(s, w, plane);
  827. left = left_prediction(s, dst, s->temp[0], w, left);
  828. if (s->predictor == PLANE) {
  829. if (y > s->interlaced) {
  830. add_bytes(s, dst, dst - fake_stride, w);
  831. }
  832. }
  833. }
  834. break;
  835. case MEDIAN:
  836. decode_plane_bitstream(s, w, plane);
  837. left= left_prediction(s, p->data[plane], s->temp[0], w, 0);
  838. y = 1;
  839. /* second line is left predicted for interlaced case */
  840. if (s->interlaced) {
  841. decode_plane_bitstream(s, w, plane);
  842. left = left_prediction(s, p->data[plane] + p->linesize[plane], s->temp[0], w, left);
  843. y++;
  844. }
  845. lefttop = p->data[plane][0];
  846. decode_plane_bitstream(s, w, plane);
  847. add_median_prediction(s, p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
  848. y++;
  849. for (; y<h; y++) {
  850. uint8_t *dst;
  851. decode_plane_bitstream(s, w, plane);
  852. dst = p->data[plane] + p->linesize[plane] * y;
  853. add_median_prediction(s, dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
  854. }
  855. break;
  856. }
  857. }
  858. draw_slice(s, p, height);
  859. } else if (s->bitstream_bpp < 24) {
  860. int y, cy;
  861. int lefty, leftu, leftv;
  862. int lefttopy, lefttopu, lefttopv;
  863. if (s->yuy2) {
  864. p->data[0][3] = get_bits(&s->gb, 8);
  865. p->data[0][2] = get_bits(&s->gb, 8);
  866. p->data[0][1] = get_bits(&s->gb, 8);
  867. p->data[0][0] = get_bits(&s->gb, 8);
  868. av_log(avctx, AV_LOG_ERROR,
  869. "YUY2 output is not implemented yet\n");
  870. return AVERROR_PATCHWELCOME;
  871. } else {
  872. leftv =
  873. p->data[2][0] = get_bits(&s->gb, 8);
  874. lefty =
  875. p->data[0][1] = get_bits(&s->gb, 8);
  876. leftu =
  877. p->data[1][0] = get_bits(&s->gb, 8);
  878. p->data[0][0] = get_bits(&s->gb, 8);
  879. switch (s->predictor) {
  880. case LEFT:
  881. case PLANE:
  882. decode_422_bitstream(s, width - 2);
  883. lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + 2, s->temp[0],
  884. width - 2, lefty);
  885. if (!(s->flags & CODEC_FLAG_GRAY)) {
  886. leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
  887. leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
  888. }
  889. for (cy = y = 1; y < s->height; y++, cy++) {
  890. uint8_t *ydst, *udst, *vdst;
  891. if (s->bitstream_bpp == 12) {
  892. decode_gray_bitstream(s, width);
  893. ydst = p->data[0] + p->linesize[0] * y;
  894. lefty = s->hdsp.add_hfyu_left_pred(ydst, s->temp[0],
  895. width, lefty);
  896. if (s->predictor == PLANE) {
  897. if (y > s->interlaced)
  898. s->hdsp.add_bytes(ydst, ydst - fake_ystride, width);
  899. }
  900. y++;
  901. if (y >= s->height)
  902. break;
  903. }
  904. draw_slice(s, p, y);
  905. ydst = p->data[0] + p->linesize[0] * y;
  906. udst = p->data[1] + p->linesize[1] * cy;
  907. vdst = p->data[2] + p->linesize[2] * cy;
  908. decode_422_bitstream(s, width);
  909. lefty = s->hdsp.add_hfyu_left_pred(ydst, s->temp[0],
  910. width, lefty);
  911. if (!(s->flags & CODEC_FLAG_GRAY)) {
  912. leftu = s->hdsp.add_hfyu_left_pred(udst, s->temp[1], width2, leftu);
  913. leftv = s->hdsp.add_hfyu_left_pred(vdst, s->temp[2], width2, leftv);
  914. }
  915. if (s->predictor == PLANE) {
  916. if (cy > s->interlaced) {
  917. s->hdsp.add_bytes(ydst, ydst - fake_ystride, width);
  918. if (!(s->flags & CODEC_FLAG_GRAY)) {
  919. s->hdsp.add_bytes(udst, udst - fake_ustride, width2);
  920. s->hdsp.add_bytes(vdst, vdst - fake_vstride, width2);
  921. }
  922. }
  923. }
  924. }
  925. draw_slice(s, p, height);
  926. break;
  927. case MEDIAN:
  928. /* first line except first 2 pixels is left predicted */
  929. decode_422_bitstream(s, width - 2);
  930. lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + 2, s->temp[0],
  931. width - 2, lefty);
  932. if (!(s->flags & CODEC_FLAG_GRAY)) {
  933. leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
  934. leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
  935. }
  936. cy = y = 1;
  937. /* second line is left predicted for interlaced case */
  938. if (s->interlaced) {
  939. decode_422_bitstream(s, width);
  940. lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + p->linesize[0],
  941. s->temp[0], width, lefty);
  942. if (!(s->flags & CODEC_FLAG_GRAY)) {
  943. leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + p->linesize[2], s->temp[1], width2, leftu);
  944. leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + p->linesize[1], s->temp[2], width2, leftv);
  945. }
  946. y++;
  947. cy++;
  948. }
  949. /* next 4 pixels are left predicted too */
  950. decode_422_bitstream(s, 4);
  951. lefty = s->hdsp.add_hfyu_left_pred(p->data[0] + fake_ystride,
  952. s->temp[0], 4, lefty);
  953. if (!(s->flags & CODEC_FLAG_GRAY)) {
  954. leftu = s->hdsp.add_hfyu_left_pred(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
  955. leftv = s->hdsp.add_hfyu_left_pred(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
  956. }
  957. /* next line except the first 4 pixels is median predicted */
  958. lefttopy = p->data[0][3];
  959. decode_422_bitstream(s, width - 4);
  960. s->hdsp.add_hfyu_median_pred(p->data[0] + fake_ystride + 4,
  961. p->data[0] + 4, s->temp[0],
  962. width - 4, &lefty, &lefttopy);
  963. if (!(s->flags & CODEC_FLAG_GRAY)) {
  964. lefttopu = p->data[1][1];
  965. lefttopv = p->data[2][1];
  966. s->hdsp.add_hfyu_median_pred(p->data[1] + fake_ustride + 2, p->data[1] + 2, s->temp[1], width2 - 2, &leftu, &lefttopu);
  967. s->hdsp.add_hfyu_median_pred(p->data[2] + fake_vstride + 2, p->data[2] + 2, s->temp[2], width2 - 2, &leftv, &lefttopv);
  968. }
  969. y++;
  970. cy++;
  971. for (; y < height; y++, cy++) {
  972. uint8_t *ydst, *udst, *vdst;
  973. if (s->bitstream_bpp == 12) {
  974. while (2 * cy > y) {
  975. decode_gray_bitstream(s, width);
  976. ydst = p->data[0] + p->linesize[0] * y;
  977. s->hdsp.add_hfyu_median_pred(ydst, ydst - fake_ystride,
  978. s->temp[0], width,
  979. &lefty, &lefttopy);
  980. y++;
  981. }
  982. if (y >= height)
  983. break;
  984. }
  985. draw_slice(s, p, y);
  986. decode_422_bitstream(s, width);
  987. ydst = p->data[0] + p->linesize[0] * y;
  988. udst = p->data[1] + p->linesize[1] * cy;
  989. vdst = p->data[2] + p->linesize[2] * cy;
  990. s->hdsp.add_hfyu_median_pred(ydst, ydst - fake_ystride,
  991. s->temp[0], width,
  992. &lefty, &lefttopy);
  993. if (!(s->flags & CODEC_FLAG_GRAY)) {
  994. s->hdsp.add_hfyu_median_pred(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
  995. s->hdsp.add_hfyu_median_pred(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
  996. }
  997. }
  998. draw_slice(s, p, height);
  999. break;
  1000. }
  1001. }
  1002. } else {
  1003. int y;
  1004. uint8_t left[4];
  1005. const int last_line = (height - 1) * p->linesize[0];
  1006. if (s->bitstream_bpp == 32) {
  1007. left[A] = p->data[0][last_line + A] = get_bits(&s->gb, 8);
  1008. left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
  1009. left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
  1010. left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
  1011. } else {
  1012. left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
  1013. left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
  1014. left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
  1015. left[A] = p->data[0][last_line + A] = 255;
  1016. skip_bits(&s->gb, 8);
  1017. }
  1018. if (s->bgr32) {
  1019. switch (s->predictor) {
  1020. case LEFT:
  1021. case PLANE:
  1022. decode_bgr_bitstream(s, width - 1);
  1023. s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + last_line + 4,
  1024. s->temp[0], width - 1, left);
  1025. for (y = s->height - 2; y >= 0; y--) { // Yes it is stored upside down.
  1026. decode_bgr_bitstream(s, width);
  1027. s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + p->linesize[0] * y,
  1028. s->temp[0], width, left);
  1029. if (s->predictor == PLANE) {
  1030. if (s->bitstream_bpp != 32)
  1031. left[A] = 0;
  1032. if ((y & s->interlaced) == 0 &&
  1033. y < s->height - 1 - s->interlaced) {
  1034. s->hdsp.add_bytes(p->data[0] + p->linesize[0] * y,
  1035. p->data[0] + p->linesize[0] * y +
  1036. fake_ystride, fake_ystride);
  1037. }
  1038. }
  1039. }
  1040. // just 1 large slice as this is not possible in reverse order
  1041. draw_slice(s, p, height);
  1042. break;
  1043. default:
  1044. av_log(avctx, AV_LOG_ERROR,
  1045. "prediction type not supported!\n");
  1046. }
  1047. } else {
  1048. av_log(avctx, AV_LOG_ERROR,
  1049. "BGR24 output is not implemented yet\n");
  1050. return AVERROR_PATCHWELCOME;
  1051. }
  1052. }
  1053. emms_c();
  1054. *got_frame = 1;
  1055. return (get_bits_count(&s->gb) + 31) / 32 * 4 + table_size;
  1056. }
  1057. static av_cold int decode_end(AVCodecContext *avctx)
  1058. {
  1059. HYuvContext *s = avctx->priv_data;
  1060. int i;
  1061. ff_huffyuv_common_end(s);
  1062. av_freep(&s->bitstream_buffer);
  1063. for (i = 0; i < 8; i++)
  1064. ff_free_vlc(&s->vlc[i]);
  1065. return 0;
  1066. }
/* Registration entry for the native Huffyuv decoder.
 * Capabilities: direct rendering (DR1), horizontal-band drawing via
 * draw_slice(), and frame-level threading (thread copies are set up by
 * decode_init_thread_copy when threads are enabled). */
AVCodec ff_huffyuv_decoder = {
    .name             = "huffyuv",
    .long_name        = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_HUFFYUV,
    .priv_data_size   = sizeof(HYuvContext),
    .init             = decode_init,
    .close            = decode_end,
    .decode           = decode_frame,
    .capabilities     = CODEC_CAP_DR1 | CODEC_CAP_DRAW_HORIZ_BAND |
                        CODEC_CAP_FRAME_THREADS,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
};
#if CONFIG_FFVHUFF_DECODER
/* Registration entry for the FFmpeg variant of Huffyuv ("ffvhuff").
 * Shares all callbacks and capabilities with ff_huffyuv_decoder above;
 * only the codec id/name differ.  Compiled in only when the ffvhuff
 * decoder is enabled in the build configuration. */
AVCodec ff_ffvhuff_decoder = {
    .name             = "ffvhuff",
    .long_name        = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
    .type             = AVMEDIA_TYPE_VIDEO,
    .id               = AV_CODEC_ID_FFVHUFF,
    .priv_data_size   = sizeof(HYuvContext),
    .init             = decode_init,
    .close            = decode_end,
    .decode           = decode_frame,
    .capabilities     = CODEC_CAP_DR1 | CODEC_CAP_DRAW_HORIZ_BAND |
                        CODEC_CAP_FRAME_THREADS,
    .init_thread_copy = ONLY_IF_THREADS_ENABLED(decode_init_thread_copy),
};
#endif /* CONFIG_FFVHUFF_DECODER */