You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1315 lines
48KB

  1. /*
  2. * huffyuv decoder
  3. *
  4. * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
  5. *
  6. * see http://www.pcisys.net/~melanson/codecs/huffyuv.txt for a description of
  7. * the algorithm used
  8. *
  9. * This file is part of FFmpeg.
  10. *
  11. * FFmpeg is free software; you can redistribute it and/or
  12. * modify it under the terms of the GNU Lesser General Public
  13. * License as published by the Free Software Foundation; either
  14. * version 2.1 of the License, or (at your option) any later version.
  15. *
  16. * FFmpeg is distributed in the hope that it will be useful,
  17. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  18. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  19. * Lesser General Public License for more details.
  20. *
  21. * You should have received a copy of the GNU Lesser General Public
  22. * License along with FFmpeg; if not, write to the Free Software
  23. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  24. *
  25. * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
  26. */
  27. /**
  28. * @file
  29. * huffyuv decoder
  30. */
  31. #define UNCHECKED_BITSTREAM_READER 1
  32. #include "avcodec.h"
  33. #include "get_bits.h"
  34. #include "huffyuv.h"
  35. #include "huffyuvdsp.h"
  36. #include "lossless_videodsp.h"
  37. #include "thread.h"
  38. #include "libavutil/imgutils.h"
  39. #include "libavutil/pixdesc.h"
/* Run-length coded code-length tables for "classic" Huffyuv streams that
 * carry no Huffman tables in the extradata (version 0/1 files).  The blobs
 * are parsed with read_len_table(); the trailing zero bytes provide the
 * padding the bitstream reader requires. */
#define classic_shift_luma_table_size 42
static const unsigned char classic_shift_luma[classic_shift_luma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
    34, 36, 35, 69, 135, 232, 9, 16, 10, 24, 11, 23, 12, 16, 13, 10,
    14, 8, 15, 8, 16, 8, 17, 20, 16, 10, 207, 206, 205, 236, 11, 8,
    10, 21, 9, 23, 8, 8, 199, 70, 69, 68, 0,
    0,0,0,0,0,0,0,0,
};

#define classic_shift_chroma_table_size 59
static const unsigned char classic_shift_chroma[classic_shift_chroma_table_size + AV_INPUT_BUFFER_PADDING_SIZE] = {
    66, 36, 37, 38, 39, 40, 41, 75, 76, 77, 110, 239, 144, 81, 82, 83,
    84, 85, 118, 183, 56, 57, 88, 89, 56, 89, 154, 57, 58, 57, 26, 141,
    57, 56, 58, 57, 58, 57, 184, 119, 214, 245, 116, 83, 82, 49, 80, 79,
    78, 77, 44, 75, 41, 40, 39, 38, 37, 36, 34, 0,
    0,0,0,0,0,0,0,0,
};
/* Fixed Huffman code values for classic (no-extradata) streams; copied into
 * s->bits[] by read_old_huffman_tables().  Indexed by symbol. */
static const unsigned char classic_add_luma[256] = {
    3, 9, 5, 12, 10, 35, 32, 29, 27, 50, 48, 45, 44, 41, 39, 37,
    73, 70, 68, 65, 64, 61, 58, 56, 53, 50, 49, 46, 44, 41, 38, 36,
    68, 65, 63, 61, 58, 55, 53, 51, 48, 46, 45, 43, 41, 39, 38, 36,
    35, 33, 32, 30, 29, 27, 26, 25, 48, 47, 46, 44, 43, 41, 40, 39,
    37, 36, 35, 34, 32, 31, 30, 28, 27, 26, 24, 23, 22, 20, 19, 37,
    35, 34, 33, 31, 30, 29, 27, 26, 24, 23, 21, 20, 18, 17, 15, 29,
    27, 26, 24, 22, 21, 19, 17, 16, 14, 26, 25, 23, 21, 19, 18, 16,
    15, 27, 25, 23, 21, 19, 17, 16, 14, 26, 25, 23, 21, 18, 17, 14,
    12, 17, 19, 13, 4, 9, 2, 11, 1, 7, 8, 0, 16, 3, 14, 6,
    12, 10, 5, 15, 18, 11, 10, 13, 15, 16, 19, 20, 22, 24, 27, 15,
    18, 20, 22, 24, 26, 14, 17, 20, 22, 24, 27, 15, 18, 20, 23, 25,
    28, 16, 19, 22, 25, 28, 32, 36, 21, 25, 29, 33, 38, 42, 45, 49,
    28, 31, 34, 37, 40, 42, 44, 47, 49, 50, 52, 54, 56, 57, 59, 60,
    62, 64, 66, 67, 69, 35, 37, 39, 40, 42, 43, 45, 47, 48, 51, 52,
    54, 55, 57, 59, 60, 62, 63, 66, 67, 69, 71, 72, 38, 40, 42, 43,
    46, 47, 49, 51, 26, 28, 30, 31, 33, 34, 18, 19, 11, 13, 7, 8,
};

/* Chroma counterpart of classic_add_luma. */
static const unsigned char classic_add_chroma[256] = {
    3, 1, 2, 2, 2, 2, 3, 3, 7, 5, 7, 5, 8, 6, 11, 9,
    7, 13, 11, 10, 9, 8, 7, 5, 9, 7, 6, 4, 7, 5, 8, 7,
    11, 8, 13, 11, 19, 15, 22, 23, 20, 33, 32, 28, 27, 29, 51, 77,
    43, 45, 76, 81, 46, 82, 75, 55, 56, 144, 58, 80, 60, 74, 147, 63,
    143, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
    80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 27, 30, 21, 22,
    17, 14, 5, 6, 100, 54, 47, 50, 51, 53, 106, 107, 108, 109, 110, 111,
    112, 113, 114, 115, 4, 117, 118, 92, 94, 121, 122, 3, 124, 103, 2, 1,
    0, 129, 130, 131, 120, 119, 126, 125, 136, 137, 138, 139, 140, 141, 142, 134,
    135, 132, 133, 104, 64, 101, 62, 57, 102, 95, 93, 59, 61, 28, 97, 96,
    52, 49, 48, 29, 32, 25, 24, 46, 23, 98, 45, 44, 43, 20, 42, 41,
    19, 18, 99, 40, 15, 39, 38, 16, 13, 12, 11, 37, 10, 9, 8, 36,
    7, 128, 127, 105, 123, 116, 35, 34, 33, 145, 31, 79, 42, 146, 78, 26,
    83, 48, 49, 50, 44, 47, 26, 31, 30, 18, 17, 19, 21, 24, 25, 13,
    14, 16, 17, 18, 20, 21, 12, 14, 15, 9, 10, 6, 9, 6, 5, 8,
    6, 12, 8, 10, 7, 9, 6, 4, 6, 2, 2, 3, 3, 3, 3, 2,
};
  91. static int read_len_table(uint8_t *dst, GetBitContext *gb, int n)
  92. {
  93. int i, val, repeat;
  94. for (i = 0; i < n;) {
  95. repeat = get_bits(gb, 3);
  96. val = get_bits(gb, 5);
  97. if (repeat == 0)
  98. repeat = get_bits(gb, 8);
  99. if (i + repeat > n || get_bits_left(gb) < 0) {
  100. av_log(NULL, AV_LOG_ERROR, "Error reading huffman table\n");
  101. return AVERROR_INVALIDDATA;
  102. }
  103. while (repeat--)
  104. dst[i++] = val;
  105. }
  106. return 0;
  107. }
/* Build the "joint" VLC tables (s->vlc[4..7]) that decode two symbols per
 * table lookup whenever both codes together fit into VLC_BITS bits.
 * YUV/gray streams get one joint table per plane; 24/32-bit RGB streams
 * (version <= 2) get a single table decoding a whole G/B/R triplet, with
 * the decoded pixel values stored in s->pix_bgr_map.
 * Returns 0 on success or a negative AVERROR code. */
static int generate_joint_tables(HYuvContext *s)
{
    int ret;
    /* single scratch allocation, partitioned into symbols / bits / len */
    uint16_t *symbols = av_mallocz(5 << VLC_BITS);
    uint16_t *bits;
    uint8_t *len;

    if (!symbols)
        return AVERROR(ENOMEM);
    bits = symbols + (1 << VLC_BITS);
    len  = (uint8_t *)(bits + (1 << VLC_BITS));

    if (s->bitstream_bpp < 24 || s->version > 2) {
        int p, i, y, u;
        for (p = 0; p < 4; p++) {
            /* version <= 2 streams share one table for the first symbol */
            int p0 = s->version > 2 ? p : 0;
            for (i = y = 0; y < s->vlc_n; y++) {
                int len0  = s->len[p0][y];
                int limit = VLC_BITS - len0;
                if (limit <= 0 || !len0)
                    continue;
                /* skip symbols outside the valid (sign-extended) range */
                if ((sign_extend(y, 8) & (s->vlc_n - 1)) != y)
                    continue;
                for (u = 0; u < s->vlc_n; u++) {
                    int len1 = s->len[p][u];
                    if (len1 > limit || !len1)
                        continue;
                    if ((sign_extend(u, 8) & (s->vlc_n - 1)) != u)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]     = len0 + len1;
                    bits[i]    = (s->bits[p0][y] << len1) + s->bits[p][u];
                    symbols[i] = (y << 8) + (u & 0xFF);
                    i++;
                }
            }
            ff_free_vlc(&s->vlc[4 + p]);
            if ((ret = ff_init_vlc_sparse(&s->vlc[4 + p], VLC_BITS, i, len, 1, 1,
                                          bits, 2, 2, symbols, 2, 2, 0)) < 0)
                goto out;
        }
    } else {
        uint8_t (*map)[4] = (uint8_t(*)[4]) s->pix_bgr_map;
        int i, b, g, r, code;
        int p0 = s->decorrelate;
        int p1 = !s->decorrelate;
        /* Restrict the range to +/-16 because that's pretty much guaranteed
         * to cover all the combinations that fit in 11 bits total, and it
         * does not matter if we miss a few rare codes. */
        for (i = 0, g = -16; g < 16; g++) {
            int len0   = s->len[p0][g & 255];
            int limit0 = VLC_BITS - len0;
            if (limit0 < 2 || !len0)
                continue;
            for (b = -16; b < 16; b++) {
                int len1   = s->len[p1][b & 255];
                int limit1 = limit0 - len1;
                if (limit1 < 1 || !len1)
                    continue;
                code = (s->bits[p0][g & 255] << len1) + s->bits[p1][b & 255];
                for (r = -16; r < 16; r++) {
                    int len2 = s->len[2][r & 255];
                    if (len2 > limit1 || !len2)
                        continue;
                    av_assert0(i < (1 << VLC_BITS));
                    len[i]  = len0 + len1 + len2;
                    bits[i] = (code << len2) + s->bits[2][r & 255];
                    if (s->decorrelate) {
                        /* B and R are stored as differences against G */
                        map[i][G] = g;
                        map[i][B] = g + b;
                        map[i][R] = g + r;
                    } else {
                        map[i][B] = g;
                        map[i][G] = b;
                        map[i][R] = r;
                    }
                    i++;
                }
            }
        }
        ff_free_vlc(&s->vlc[4]);
        if ((ret = init_vlc(&s->vlc[4], VLC_BITS, i, len, 1, 1,
                            bits, 2, 2, 0)) < 0)
            goto out;
    }
    ret = 0;
out:
    av_freep(&symbols);
    return ret;
}
  196. static int read_huffman_tables(HYuvContext *s, const uint8_t *src, int length)
  197. {
  198. GetBitContext gb;
  199. int i, ret;
  200. int count = 3;
  201. if ((ret = init_get_bits(&gb, src, length * 8)) < 0)
  202. return ret;
  203. if (s->version > 2)
  204. count = 1 + s->alpha + 2*s->chroma;
  205. for (i = 0; i < count; i++) {
  206. if ((ret = read_len_table(s->len[i], &gb, s->vlc_n)) < 0)
  207. return ret;
  208. if ((ret = ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n)) < 0)
  209. return ret;
  210. ff_free_vlc(&s->vlc[i]);
  211. if ((ret = init_vlc(&s->vlc[i], VLC_BITS, s->vlc_n, s->len[i], 1, 1,
  212. s->bits[i], 4, 4, 0)) < 0)
  213. return ret;
  214. }
  215. if ((ret = generate_joint_tables(s)) < 0)
  216. return ret;
  217. return (get_bits_count(&gb) + 7) / 8;
  218. }
  219. static int read_old_huffman_tables(HYuvContext *s)
  220. {
  221. GetBitContext gb;
  222. int i, ret;
  223. init_get_bits(&gb, classic_shift_luma,
  224. classic_shift_luma_table_size * 8);
  225. if ((ret = read_len_table(s->len[0], &gb, 256)) < 0)
  226. return ret;
  227. init_get_bits(&gb, classic_shift_chroma,
  228. classic_shift_chroma_table_size * 8);
  229. if ((ret = read_len_table(s->len[1], &gb, 256)) < 0)
  230. return ret;
  231. for (i = 0; i < 256; i++)
  232. s->bits[0][i] = classic_add_luma[i];
  233. for (i = 0; i < 256; i++)
  234. s->bits[1][i] = classic_add_chroma[i];
  235. if (s->bitstream_bpp >= 24) {
  236. memcpy(s->bits[1], s->bits[0], 256 * sizeof(uint32_t));
  237. memcpy(s->len[1], s->len[0], 256 * sizeof(uint8_t));
  238. }
  239. memcpy(s->bits[2], s->bits[1], 256 * sizeof(uint32_t));
  240. memcpy(s->len[2], s->len[1], 256 * sizeof(uint8_t));
  241. for (i = 0; i < 4; i++) {
  242. ff_free_vlc(&s->vlc[i]);
  243. if ((ret = init_vlc(&s->vlc[i], VLC_BITS, 256, s->len[i], 1, 1,
  244. s->bits[i], 4, 4, 0)) < 0)
  245. return ret;
  246. }
  247. if ((ret = generate_joint_tables(s)) < 0)
  248. return ret;
  249. return 0;
  250. }
  251. static av_cold int decode_end(AVCodecContext *avctx)
  252. {
  253. HYuvContext *s = avctx->priv_data;
  254. int i;
  255. ff_huffyuv_common_end(s);
  256. av_freep(&s->bitstream_buffer);
  257. for (i = 0; i < 8; i++)
  258. ff_free_vlc(&s->vlc[i]);
  259. return 0;
  260. }
  261. static av_cold int decode_init(AVCodecContext *avctx)
  262. {
  263. HYuvContext *s = avctx->priv_data;
  264. int ret;
  265. ret = av_image_check_size(avctx->width, avctx->height, 0, avctx);
  266. if (ret < 0)
  267. return ret;
  268. ff_huffyuvdsp_init(&s->hdsp, avctx->pix_fmt);
  269. ff_llviddsp_init(&s->llviddsp);
  270. memset(s->vlc, 0, 4 * sizeof(VLC));
  271. s->interlaced = avctx->height > 288;
  272. s->bgr32 = 1;
  273. if (avctx->extradata_size) {
  274. if ((avctx->bits_per_coded_sample & 7) &&
  275. avctx->bits_per_coded_sample != 12)
  276. s->version = 1; // do such files exist at all?
  277. else if (avctx->extradata_size > 3 && avctx->extradata[3] == 0)
  278. s->version = 2;
  279. else
  280. s->version = 3;
  281. } else
  282. s->version = 0;
  283. s->bps = 8;
  284. s->n = 1<<s->bps;
  285. s->vlc_n = FFMIN(s->n, MAX_VLC_N);
  286. s->chroma = 1;
  287. if (s->version >= 2) {
  288. int method, interlace;
  289. if (avctx->extradata_size < 4)
  290. return AVERROR_INVALIDDATA;
  291. method = avctx->extradata[0];
  292. s->decorrelate = method & 64 ? 1 : 0;
  293. s->predictor = method & 63;
  294. if (s->version == 2) {
  295. s->bitstream_bpp = avctx->extradata[1];
  296. if (s->bitstream_bpp == 0)
  297. s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
  298. } else {
  299. s->bps = (avctx->extradata[1] >> 4) + 1;
  300. s->n = 1<<s->bps;
  301. s->vlc_n = FFMIN(s->n, MAX_VLC_N);
  302. s->chroma_h_shift = avctx->extradata[1] & 3;
  303. s->chroma_v_shift = (avctx->extradata[1] >> 2) & 3;
  304. s->yuv = !!(avctx->extradata[2] & 1);
  305. s->chroma= !!(avctx->extradata[2] & 3);
  306. s->alpha = !!(avctx->extradata[2] & 4);
  307. }
  308. interlace = (avctx->extradata[2] & 0x30) >> 4;
  309. s->interlaced = (interlace == 1) ? 1 : (interlace == 2) ? 0 : s->interlaced;
  310. s->context = avctx->extradata[2] & 0x40 ? 1 : 0;
  311. if ((ret = read_huffman_tables(s, avctx->extradata + 4,
  312. avctx->extradata_size - 4)) < 0)
  313. goto error;
  314. } else {
  315. switch (avctx->bits_per_coded_sample & 7) {
  316. case 1:
  317. s->predictor = LEFT;
  318. s->decorrelate = 0;
  319. break;
  320. case 2:
  321. s->predictor = LEFT;
  322. s->decorrelate = 1;
  323. break;
  324. case 3:
  325. s->predictor = PLANE;
  326. s->decorrelate = avctx->bits_per_coded_sample >= 24;
  327. break;
  328. case 4:
  329. s->predictor = MEDIAN;
  330. s->decorrelate = 0;
  331. break;
  332. default:
  333. s->predictor = LEFT; // OLD
  334. s->decorrelate = 0;
  335. break;
  336. }
  337. s->bitstream_bpp = avctx->bits_per_coded_sample & ~7;
  338. s->context = 0;
  339. if ((ret = read_old_huffman_tables(s)) < 0)
  340. goto error;
  341. }
  342. if (s->version <= 2) {
  343. switch (s->bitstream_bpp) {
  344. case 12:
  345. avctx->pix_fmt = AV_PIX_FMT_YUV420P;
  346. s->yuv = 1;
  347. break;
  348. case 16:
  349. if (s->yuy2)
  350. avctx->pix_fmt = AV_PIX_FMT_YUYV422;
  351. else
  352. avctx->pix_fmt = AV_PIX_FMT_YUV422P;
  353. s->yuv = 1;
  354. break;
  355. case 24:
  356. if (s->bgr32)
  357. avctx->pix_fmt = AV_PIX_FMT_0RGB32;
  358. else
  359. avctx->pix_fmt = AV_PIX_FMT_BGR24;
  360. break;
  361. case 32:
  362. av_assert0(s->bgr32);
  363. avctx->pix_fmt = AV_PIX_FMT_RGB32;
  364. s->alpha = 1;
  365. break;
  366. default:
  367. ret = AVERROR_INVALIDDATA;
  368. goto error;
  369. }
  370. av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
  371. &s->chroma_h_shift,
  372. &s->chroma_v_shift);
  373. } else {
  374. switch ( (s->chroma<<10) | (s->yuv<<9) | (s->alpha<<8) | ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2)) {
  375. case 0x070:
  376. avctx->pix_fmt = AV_PIX_FMT_GRAY8;
  377. break;
  378. case 0x0F0:
  379. avctx->pix_fmt = AV_PIX_FMT_GRAY16;
  380. break;
  381. case 0x470:
  382. avctx->pix_fmt = AV_PIX_FMT_GBRP;
  383. break;
  384. case 0x480:
  385. avctx->pix_fmt = AV_PIX_FMT_GBRP9;
  386. break;
  387. case 0x490:
  388. avctx->pix_fmt = AV_PIX_FMT_GBRP10;
  389. break;
  390. case 0x4B0:
  391. avctx->pix_fmt = AV_PIX_FMT_GBRP12;
  392. break;
  393. case 0x4D0:
  394. avctx->pix_fmt = AV_PIX_FMT_GBRP14;
  395. break;
  396. case 0x4F0:
  397. avctx->pix_fmt = AV_PIX_FMT_GBRP16;
  398. break;
  399. case 0x570:
  400. avctx->pix_fmt = AV_PIX_FMT_GBRAP;
  401. break;
  402. case 0x670:
  403. avctx->pix_fmt = AV_PIX_FMT_YUV444P;
  404. break;
  405. case 0x680:
  406. avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
  407. break;
  408. case 0x690:
  409. avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
  410. break;
  411. case 0x6B0:
  412. avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
  413. break;
  414. case 0x6D0:
  415. avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
  416. break;
  417. case 0x6F0:
  418. avctx->pix_fmt = AV_PIX_FMT_YUV444P16;
  419. break;
  420. case 0x671:
  421. avctx->pix_fmt = AV_PIX_FMT_YUV422P;
  422. break;
  423. case 0x681:
  424. avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
  425. break;
  426. case 0x691:
  427. avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
  428. break;
  429. case 0x6B1:
  430. avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
  431. break;
  432. case 0x6D1:
  433. avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
  434. break;
  435. case 0x6F1:
  436. avctx->pix_fmt = AV_PIX_FMT_YUV422P16;
  437. break;
  438. case 0x672:
  439. avctx->pix_fmt = AV_PIX_FMT_YUV411P;
  440. break;
  441. case 0x674:
  442. avctx->pix_fmt = AV_PIX_FMT_YUV440P;
  443. break;
  444. case 0x675:
  445. avctx->pix_fmt = AV_PIX_FMT_YUV420P;
  446. break;
  447. case 0x685:
  448. avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
  449. break;
  450. case 0x695:
  451. avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
  452. break;
  453. case 0x6B5:
  454. avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
  455. break;
  456. case 0x6D5:
  457. avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
  458. break;
  459. case 0x6F5:
  460. avctx->pix_fmt = AV_PIX_FMT_YUV420P16;
  461. break;
  462. case 0x67A:
  463. avctx->pix_fmt = AV_PIX_FMT_YUV410P;
  464. break;
  465. case 0x770:
  466. avctx->pix_fmt = AV_PIX_FMT_YUVA444P;
  467. break;
  468. case 0x780:
  469. avctx->pix_fmt = AV_PIX_FMT_YUVA444P9;
  470. break;
  471. case 0x790:
  472. avctx->pix_fmt = AV_PIX_FMT_YUVA444P10;
  473. break;
  474. case 0x7F0:
  475. avctx->pix_fmt = AV_PIX_FMT_YUVA444P16;
  476. break;
  477. case 0x771:
  478. avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
  479. break;
  480. case 0x781:
  481. avctx->pix_fmt = AV_PIX_FMT_YUVA422P9;
  482. break;
  483. case 0x791:
  484. avctx->pix_fmt = AV_PIX_FMT_YUVA422P10;
  485. break;
  486. case 0x7F1:
  487. avctx->pix_fmt = AV_PIX_FMT_YUVA422P16;
  488. break;
  489. case 0x775:
  490. avctx->pix_fmt = AV_PIX_FMT_YUVA420P;
  491. break;
  492. case 0x785:
  493. avctx->pix_fmt = AV_PIX_FMT_YUVA420P9;
  494. break;
  495. case 0x795:
  496. avctx->pix_fmt = AV_PIX_FMT_YUVA420P10;
  497. break;
  498. case 0x7F5:
  499. avctx->pix_fmt = AV_PIX_FMT_YUVA420P16;
  500. break;
  501. default:
  502. ret = AVERROR_INVALIDDATA;
  503. goto error;
  504. }
  505. }
  506. ff_huffyuv_common_init(avctx);
  507. if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
  508. av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
  509. ret = AVERROR_INVALIDDATA;
  510. goto error;
  511. }
  512. if (s->predictor == MEDIAN && avctx->pix_fmt == AV_PIX_FMT_YUV422P &&
  513. avctx->width % 4) {
  514. av_log(avctx, AV_LOG_ERROR, "width must be a multiple of 4 "
  515. "for this combination of colorspace and predictor type.\n");
  516. ret = AVERROR_INVALIDDATA;
  517. goto error;
  518. }
  519. if ((ret = ff_huffyuv_alloc_temp(s)) < 0) {
  520. ff_huffyuv_common_end(s);
  521. goto error;
  522. }
  523. return 0;
  524. error:
  525. decode_end(avctx);
  526. return ret;
  527. }
/** Subset of GET_VLC for use in hand-rolled VLC code.
 *  Resolves up to max_depth table levels; the caller must declare
 *  'code', 'n', 'nb_bits' and 'index' and keep the bit cache updated. */
#define VLC_INTERN(dst, table, gb, name, bits, max_depth) \
    code = table[index][0]; \
    n    = table[index][1]; \
    if (max_depth > 1 && n < 0) { \
        LAST_SKIP_BITS(name, gb, bits); \
        UPDATE_CACHE(name, gb); \
\
        nb_bits = -n; \
        index   = SHOW_UBITS(name, gb, nb_bits) + code; \
        code    = table[index][0]; \
        n       = table[index][1]; \
        if (max_depth > 2 && n < 0) { \
            LAST_SKIP_BITS(name, gb, nb_bits); \
            UPDATE_CACHE(name, gb); \
\
            nb_bits = -n; \
            index   = SHOW_UBITS(name, gb, nb_bits) + code; \
            code    = table[index][0]; \
            n       = table[index][1]; \
        } \
    } \
    dst = code; \
    LAST_SKIP_BITS(name, gb, n)

/** Decode two symbols at once: try the joint table 'dtable' first and, on
 *  a miss (n <= 0), fall back to two single-symbol lookups in
 *  table1/table2.  OP unpacks the joint code into dst0/dst1. */
#define GET_VLC_DUAL(dst0, dst1, name, gb, dtable, table1, table2, \
                     bits, max_depth, OP) \
    do { \
        unsigned int index = SHOW_UBITS(name, gb, bits); \
        int          code, n = dtable[index][1]; \
\
        if (n <= 0) { \
            int nb_bits; \
            VLC_INTERN(dst0, table1, gb, name, bits, max_depth); \
\
            UPDATE_CACHE(re, gb); \
            index = SHOW_UBITS(name, gb, bits); \
            VLC_INTERN(dst1, table2, gb, name, bits, max_depth); \
        } else { \
            code = dtable[index][0]; \
            OP(dst0, dst1, code); \
            LAST_SKIP_BITS(name, gb, n); \
        } \
    } while (0)

/* Unpack a joint code into two 8-bit samples (high byte, low byte). */
#define OP8bits(dst0, dst1, code) dst0 = code >> 8; dst1 = code

/* Read one luma + one chroma sample via joint table 4+plane1. */
#define READ_2PIX(dst0, dst1, plane1) \
    UPDATE_CACHE(re, &s->gb); \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4 + plane1].table, \
                 s->vlc[0].table, s->vlc[plane1].table, VLC_BITS, 3, OP8bits)
/* Decode 'count' 4:2:2 samples (two luma + one U + one V per iteration)
 * into s->temp[0..2].  When the remaining input might be shorter than the
 * worst case, switch to bounds-checked loops and zero-fill whatever could
 * not be decoded instead of overreading. */
static void decode_422_bitstream(HYuvContext *s, int count)
{
    int i, icount;
    OPEN_READER(re, &s->gb);
    count /= 2;

    /* conservative count of pairs guaranteed to fit in the remaining bits */
    icount = get_bits_left(&s->gb) / (32 * 4);
    if (count >= icount) {
        /* fast unchecked part ... */
        for (i = 0; i < icount; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        /* ... then check before every read ... */
        for (; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i ], s->temp[1][i], 1);
            if (BITS_LEFT(re, &s->gb) <= 0) break;
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
        /* ... and zero anything left undecoded. */
        for (; i < count; i++)
            s->temp[0][2 * i ] = s->temp[1][i] =
            s->temp[0][2 * i + 1] = s->temp[2][i] = 0;
    } else {
        /* plenty of input: no per-read checks needed */
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[1][i], 1);
            READ_2PIX(s->temp[0][2 * i + 1], s->temp[2][i], 2);
        }
    }
    CLOSE_READER(re, &s->gb);
}
/* Like READ_2PIX, but both samples come from the same plane's tables. */
#define READ_2PIX_PLANE(dst0, dst1, plane, OP) \
    UPDATE_CACHE(re, &s->gb); \
    GET_VLC_DUAL(dst0, dst1, re, &s->gb, s->vlc[4 + plane].table, \
                 s->vlc[plane].table, s->vlc[plane].table, VLC_BITS, 3, OP)

/* Unpack a joint code: high byte to dst0, sign-extended low byte to dst1
 * (used by the 9-14 bit-per-sample path). */
#define OP14bits(dst0, dst1, code) dst0 = code >> 8; dst1 = sign_extend(code, 8)

/* TODO instead of restarting the read when the code isn't in the first level
 * of the joint table, jump into the 2nd level of the individual table. */
/* 15/16-bit path: each sample is a VLC code for the high bits plus two
 * raw LSBs read directly from the stream. */
#define READ_2PIX_PLANE16(dst0, dst1, plane){\
    dst0  = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;\
    dst0 += get_bits(&s->gb, 2);\
    dst1  = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;\
    dst1 += get_bits(&s->gb, 2);\
}
/* Decode one row of 'width' samples of the given plane into s->temp[0]
 * (bps <= 8) or s->temp16[0] (bps > 8).  Samples are decoded in pairs;
 * an odd trailing sample is decoded separately.  When the remaining input
 * might be too short for the worst case, the loops are bounds-checked. */
static void decode_plane_bitstream(HYuvContext *s, int width, int plane)
{
    int i, count = width / 2;

    if (s->bps <= 8) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            /* input may run out: check before every pair */
            for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp[0][2 * i], s->temp[0][2 * i + 1], plane, OP8bits);
            }
        }
        /* odd width: one final unpaired sample */
        if (width & 1 && BITS_LEFT(re, &s->gb) > 0) {
            unsigned int index;
            int nb_bits, code, n;
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp[0][width - 1], s->vlc[plane].table,
                       &s->gb, re, VLC_BITS, 3);
        }
        CLOSE_READER(re, &s->gb);
    } else if (s->bps <= 14) {
        OPEN_READER(re, &s->gb);
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane, OP14bits);
            }
        }
        if (width & 1 && BITS_LEFT(re, &s->gb) > 0) {
            unsigned int index;
            int nb_bits, code, n;
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp16[0][width - 1], s->vlc[plane].table,
                       &s->gb, re, VLC_BITS, 3);
        }
        CLOSE_READER(re, &s->gb);
    } else {
        /* >14 bit: plain get_vlc2/get_bits path, no cached reader */
        if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
            for (i = 0; i < count && get_bits_left(&s->gb) > 0; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        } else {
            for (i = 0; i < count; i++) {
                READ_2PIX_PLANE16(s->temp16[0][2 * i], s->temp16[0][2 * i + 1], plane);
            }
        }
        if (width & 1 && get_bits_left(&s->gb) > 0) {
            int dst = get_vlc2(&s->gb, s->vlc[plane].table, VLC_BITS, 3) << 2;
            s->temp16[0][width - 1] = dst + get_bits(&s->gb, 2);
        }
    }
}
/* Decode 'count' gray (luma-only) samples into s->temp[0], two per joint
 * read.  Uses a bounds-checked loop when the input may be too short. */
static void decode_gray_bitstream(HYuvContext *s, int count)
{
    int i;
    OPEN_READER(re, &s->gb);
    count /= 2;

    if (count >= (get_bits_left(&s->gb)) / (32 * 2)) {
        /* input may run out: check before every pair */
        for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    } else {
        for (i = 0; i < count; i++) {
            READ_2PIX(s->temp[0][2 * i], s->temp[0][2 * i + 1], 0);
        }
    }
    CLOSE_READER(re, &s->gb);
}
/* Decode 'count' BGR(A) pixels into s->temp[0], 4 bytes per pixel.
 * Fast path: a single joint-table hit yields a precomputed pixel from
 * s->pix_bgr_map.  Slow path: decode G/B/R (and optionally A) separately;
 * with decorrelate set, B and R are coded as differences against G.
 * Always inlined so the constant decorrelate/alpha arguments specialize
 * the body at each call site. */
static av_always_inline void decode_bgr_1(HYuvContext *s, int count,
                                          int decorrelate, int alpha)
{
    int i;
    OPEN_READER(re, &s->gb);

    for (i = 0; i < count && BITS_LEFT(re, &s->gb) > 0; i++) {
        unsigned int index;
        int code, n, nb_bits;

        UPDATE_CACHE(re, &s->gb);
        index = SHOW_UBITS(re, &s->gb, VLC_BITS);
        n     = s->vlc[4].table[index][1];

        if (n > 0) {
            /* joint-table hit: whole pixel in one lookup */
            code = s->vlc[4].table[index][0];
            *(uint32_t *) &s->temp[0][4 * i] = s->pix_bgr_map[code];
            LAST_SKIP_BITS(re, &s->gb, n);
        } else {
            if (decorrelate) {
                /* G first, then B and R as deltas against G */
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[0].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + B] = code + s->temp[0][4 * i + G];

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(code, s->vlc[2].table, &s->gb, re, VLC_BITS, 3);
                s->temp[0][4 * i + R] = code + s->temp[0][4 * i + G];
            } else {
                VLC_INTERN(s->temp[0][4 * i + B], s->vlc[0].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + G], s->vlc[1].table,
                           &s->gb, re, VLC_BITS, 3);

                UPDATE_CACHE(re, &s->gb);
                index = SHOW_UBITS(re, &s->gb, VLC_BITS);
                VLC_INTERN(s->temp[0][4 * i + R], s->vlc[2].table,
                           &s->gb, re, VLC_BITS, 3);
            }
        }
        if (alpha) {
            /* alpha is decoded with table index 2 (same as the R plane) */
            UPDATE_CACHE(re, &s->gb);
            index = SHOW_UBITS(re, &s->gb, VLC_BITS);
            VLC_INTERN(s->temp[0][4 * i + A], s->vlc[2].table,
                       &s->gb, re, VLC_BITS, 3);
        } else
            s->temp[0][4 * i + A] = 0;
    }
    CLOSE_READER(re, &s->gb);
}
  741. static void decode_bgr_bitstream(HYuvContext *s, int count)
  742. {
  743. if (s->decorrelate) {
  744. if (s->bitstream_bpp == 24)
  745. decode_bgr_1(s, count, 1, 0);
  746. else
  747. decode_bgr_1(s, count, 1, 1);
  748. } else {
  749. if (s->bitstream_bpp == 24)
  750. decode_bgr_1(s, count, 0, 0);
  751. else
  752. decode_bgr_1(s, count, 0, 1);
  753. }
  754. }
  755. static void draw_slice(HYuvContext *s, AVFrame *frame, int y)
  756. {
  757. int h, cy, i;
  758. int offset[AV_NUM_DATA_POINTERS];
  759. if (!s->avctx->draw_horiz_band)
  760. return;
  761. h = y - s->last_slice_end;
  762. y -= h;
  763. if (s->bitstream_bpp == 12)
  764. cy = y >> 1;
  765. else
  766. cy = y;
  767. offset[0] = frame->linesize[0] * y;
  768. offset[1] = frame->linesize[1] * cy;
  769. offset[2] = frame->linesize[2] * cy;
  770. for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
  771. offset[i] = 0;
  772. emms_c();
  773. s->avctx->draw_horiz_band(s->avctx, frame, offset, y, 3, h);
  774. s->last_slice_end = y + h;
  775. }
  776. static int left_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, int w, int acc)
  777. {
  778. if (s->bps <= 8) {
  779. return s->llviddsp.add_left_pred(dst, src, w, acc);
  780. } else {
  781. return s->llviddsp.add_left_pred_int16(( uint16_t *)dst, (const uint16_t *)src, s->n-1, w, acc);
  782. }
  783. }
  784. static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
  785. {
  786. if (s->bps <= 8) {
  787. s->llviddsp.add_bytes(dst, src, w);
  788. } else {
  789. s->hdsp.add_int16((uint16_t*)dst, (const uint16_t*)src, s->n - 1, w);
  790. }
  791. }
  792. static void add_median_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src, const uint8_t *diff, int w, int *left, int *left_top)
  793. {
  794. if (s->bps <= 8) {
  795. s->llviddsp.add_median_pred(dst, src, diff, w, left, left_top);
  796. } else {
  797. s->hdsp.add_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src, (const uint16_t *)diff, s->n-1, w, left, left_top);
  798. }
  799. }
  800. static int decode_slice(AVCodecContext *avctx, AVFrame *p, int height,
  801. int buf_size, int y_offset, int table_size)
  802. {
  803. HYuvContext *s = avctx->priv_data;
  804. int fake_ystride, fake_ustride, fake_vstride;
  805. const int width = s->width;
  806. const int width2 = s->width >> 1;
  807. int ret;
  808. if ((ret = init_get_bits8(&s->gb, s->bitstream_buffer + table_size, buf_size - table_size)) < 0)
  809. return ret;
  810. fake_ystride = s->interlaced ? p->linesize[0] * 2 : p->linesize[0];
  811. fake_ustride = s->interlaced ? p->linesize[1] * 2 : p->linesize[1];
  812. fake_vstride = s->interlaced ? p->linesize[2] * 2 : p->linesize[2];
  813. if (s->version > 2) {
  814. int plane;
  815. for(plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
  816. int left, lefttop, y;
  817. int w = width;
  818. int h = height;
  819. int fake_stride = fake_ystride;
  820. if (s->chroma && (plane == 1 || plane == 2)) {
  821. w >>= s->chroma_h_shift;
  822. h >>= s->chroma_v_shift;
  823. fake_stride = plane == 1 ? fake_ustride : fake_vstride;
  824. }
  825. switch (s->predictor) {
  826. case LEFT:
  827. case PLANE:
  828. decode_plane_bitstream(s, w, plane);
  829. left = left_prediction(s, p->data[plane], s->temp[0], w, 0);
  830. for (y = 1; y < h; y++) {
  831. uint8_t *dst = p->data[plane] + p->linesize[plane]*y;
  832. decode_plane_bitstream(s, w, plane);
  833. left = left_prediction(s, dst, s->temp[0], w, left);
  834. if (s->predictor == PLANE) {
  835. if (y > s->interlaced) {
  836. add_bytes(s, dst, dst - fake_stride, w);
  837. }
  838. }
  839. }
  840. break;
  841. case MEDIAN:
  842. decode_plane_bitstream(s, w, plane);
  843. left= left_prediction(s, p->data[plane], s->temp[0], w, 0);
  844. y = 1;
  845. if (y >= h)
  846. break;
  847. /* second line is left predicted for interlaced case */
  848. if (s->interlaced) {
  849. decode_plane_bitstream(s, w, plane);
  850. left = left_prediction(s, p->data[plane] + p->linesize[plane], s->temp[0], w, left);
  851. y++;
  852. if (y >= h)
  853. break;
  854. }
  855. lefttop = p->data[plane][0];
  856. decode_plane_bitstream(s, w, plane);
  857. add_median_prediction(s, p->data[plane] + fake_stride, p->data[plane], s->temp[0], w, &left, &lefttop);
  858. y++;
  859. for (; y<h; y++) {
  860. uint8_t *dst;
  861. decode_plane_bitstream(s, w, plane);
  862. dst = p->data[plane] + p->linesize[plane] * y;
  863. add_median_prediction(s, dst, dst - fake_stride, s->temp[0], w, &left, &lefttop);
  864. }
  865. break;
  866. }
  867. }
  868. draw_slice(s, p, height);
  869. } else if (s->bitstream_bpp < 24) {
  870. int y, cy;
  871. int lefty, leftu, leftv;
  872. int lefttopy, lefttopu, lefttopv;
  873. if (s->yuy2) {
  874. p->data[0][3] = get_bits(&s->gb, 8);
  875. p->data[0][2] = get_bits(&s->gb, 8);
  876. p->data[0][1] = get_bits(&s->gb, 8);
  877. p->data[0][0] = get_bits(&s->gb, 8);
  878. av_log(avctx, AV_LOG_ERROR,
  879. "YUY2 output is not implemented yet\n");
  880. return AVERROR_PATCHWELCOME;
  881. } else {
  882. leftv =
  883. p->data[2][0 + y_offset * p->linesize[2]] = get_bits(&s->gb, 8);
  884. lefty =
  885. p->data[0][1 + y_offset * p->linesize[0]] = get_bits(&s->gb, 8);
  886. leftu =
  887. p->data[1][0 + y_offset * p->linesize[1]] = get_bits(&s->gb, 8);
  888. p->data[0][0 + y_offset * p->linesize[0]] = get_bits(&s->gb, 8);
  889. switch (s->predictor) {
  890. case LEFT:
  891. case PLANE:
  892. decode_422_bitstream(s, width - 2);
  893. lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0] * y_offset + 2, s->temp[0],
  894. width - 2, lefty);
  895. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  896. leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[1] * y_offset + 1, s->temp[1], width2 - 1, leftu);
  897. leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[2] * y_offset + 1, s->temp[2], width2 - 1, leftv);
  898. }
  899. for (cy = y = 1; y < height; y++, cy++) {
  900. uint8_t *ydst, *udst, *vdst;
  901. if (s->bitstream_bpp == 12) {
  902. decode_gray_bitstream(s, width);
  903. ydst = p->data[0] + p->linesize[0] * (y + y_offset);
  904. lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
  905. width, lefty);
  906. if (s->predictor == PLANE) {
  907. if (y > s->interlaced)
  908. s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
  909. }
  910. y++;
  911. if (y >= height)
  912. break;
  913. }
  914. draw_slice(s, p, y);
  915. ydst = p->data[0] + p->linesize[0] * (y + y_offset);
  916. udst = p->data[1] + p->linesize[1] * (cy + y_offset);
  917. vdst = p->data[2] + p->linesize[2] * (cy + y_offset);
  918. decode_422_bitstream(s, width);
  919. lefty = s->llviddsp.add_left_pred(ydst, s->temp[0],
  920. width, lefty);
  921. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  922. leftu = s->llviddsp.add_left_pred(udst, s->temp[1], width2, leftu);
  923. leftv = s->llviddsp.add_left_pred(vdst, s->temp[2], width2, leftv);
  924. }
  925. if (s->predictor == PLANE) {
  926. if (cy > s->interlaced) {
  927. s->llviddsp.add_bytes(ydst, ydst - fake_ystride, width);
  928. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  929. s->llviddsp.add_bytes(udst, udst - fake_ustride, width2);
  930. s->llviddsp.add_bytes(vdst, vdst - fake_vstride, width2);
  931. }
  932. }
  933. }
  934. }
  935. draw_slice(s, p, height);
  936. break;
  937. case MEDIAN:
  938. /* first line except first 2 pixels is left predicted */
  939. decode_422_bitstream(s, width - 2);
  940. lefty = s->llviddsp.add_left_pred(p->data[0] + 2, s->temp[0],
  941. width - 2, lefty);
  942. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  943. leftu = s->llviddsp.add_left_pred(p->data[1] + 1, s->temp[1], width2 - 1, leftu);
  944. leftv = s->llviddsp.add_left_pred(p->data[2] + 1, s->temp[2], width2 - 1, leftv);
  945. }
  946. cy = y = 1;
  947. if (y >= height)
  948. break;
  949. /* second line is left predicted for interlaced case */
  950. if (s->interlaced) {
  951. decode_422_bitstream(s, width);
  952. lefty = s->llviddsp.add_left_pred(p->data[0] + p->linesize[0],
  953. s->temp[0], width, lefty);
  954. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  955. leftu = s->llviddsp.add_left_pred(p->data[1] + p->linesize[2], s->temp[1], width2, leftu);
  956. leftv = s->llviddsp.add_left_pred(p->data[2] + p->linesize[1], s->temp[2], width2, leftv);
  957. }
  958. y++;
  959. cy++;
  960. if (y >= height)
  961. break;
  962. }
  963. /* next 4 pixels are left predicted too */
  964. decode_422_bitstream(s, 4);
  965. lefty = s->llviddsp.add_left_pred(p->data[0] + fake_ystride,
  966. s->temp[0], 4, lefty);
  967. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  968. leftu = s->llviddsp.add_left_pred(p->data[1] + fake_ustride, s->temp[1], 2, leftu);
  969. leftv = s->llviddsp.add_left_pred(p->data[2] + fake_vstride, s->temp[2], 2, leftv);
  970. }
  971. /* next line except the first 4 pixels is median predicted */
  972. lefttopy = p->data[0][3];
  973. decode_422_bitstream(s, width - 4);
  974. s->llviddsp.add_median_pred(p->data[0] + fake_ystride + 4,
  975. p->data[0] + 4, s->temp[0],
  976. width - 4, &lefty, &lefttopy);
  977. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  978. lefttopu = p->data[1][1];
  979. lefttopv = p->data[2][1];
  980. s->llviddsp.add_median_pred(p->data[1] + fake_ustride + 2, p->data[1] + 2, s->temp[1], width2 - 2, &leftu, &lefttopu);
  981. s->llviddsp.add_median_pred(p->data[2] + fake_vstride + 2, p->data[2] + 2, s->temp[2], width2 - 2, &leftv, &lefttopv);
  982. }
  983. y++;
  984. cy++;
  985. for (; y < height; y++, cy++) {
  986. uint8_t *ydst, *udst, *vdst;
  987. if (s->bitstream_bpp == 12) {
  988. while (2 * cy > y) {
  989. decode_gray_bitstream(s, width);
  990. ydst = p->data[0] + p->linesize[0] * y;
  991. s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
  992. s->temp[0], width,
  993. &lefty, &lefttopy);
  994. y++;
  995. }
  996. if (y >= height)
  997. break;
  998. }
  999. draw_slice(s, p, y);
  1000. decode_422_bitstream(s, width);
  1001. ydst = p->data[0] + p->linesize[0] * y;
  1002. udst = p->data[1] + p->linesize[1] * cy;
  1003. vdst = p->data[2] + p->linesize[2] * cy;
  1004. s->llviddsp.add_median_pred(ydst, ydst - fake_ystride,
  1005. s->temp[0], width,
  1006. &lefty, &lefttopy);
  1007. if (!(s->flags & AV_CODEC_FLAG_GRAY)) {
  1008. s->llviddsp.add_median_pred(udst, udst - fake_ustride, s->temp[1], width2, &leftu, &lefttopu);
  1009. s->llviddsp.add_median_pred(vdst, vdst - fake_vstride, s->temp[2], width2, &leftv, &lefttopv);
  1010. }
  1011. }
  1012. draw_slice(s, p, height);
  1013. break;
  1014. }
  1015. }
  1016. } else {
  1017. int y;
  1018. uint8_t left[4];
  1019. const int last_line = (y_offset + height - 1) * p->linesize[0];
  1020. if (s->bitstream_bpp == 32) {
  1021. left[A] = p->data[0][last_line + A] = get_bits(&s->gb, 8);
  1022. left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
  1023. left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
  1024. left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
  1025. } else {
  1026. left[R] = p->data[0][last_line + R] = get_bits(&s->gb, 8);
  1027. left[G] = p->data[0][last_line + G] = get_bits(&s->gb, 8);
  1028. left[B] = p->data[0][last_line + B] = get_bits(&s->gb, 8);
  1029. left[A] = p->data[0][last_line + A] = 255;
  1030. skip_bits(&s->gb, 8);
  1031. }
  1032. if (s->bgr32) {
  1033. switch (s->predictor) {
  1034. case LEFT:
  1035. case PLANE:
  1036. decode_bgr_bitstream(s, width - 1);
  1037. s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + last_line + 4,
  1038. s->temp[0], width - 1, left);
  1039. for (y = height - 2; y >= 0; y--) { // Yes it is stored upside down.
  1040. decode_bgr_bitstream(s, width);
  1041. s->hdsp.add_hfyu_left_pred_bgr32(p->data[0] + p->linesize[0] * (y + y_offset),
  1042. s->temp[0], width, left);
  1043. if (s->predictor == PLANE) {
  1044. if (s->bitstream_bpp != 32)
  1045. left[A] = 0;
  1046. if (y < height - 1 - s->interlaced) {
  1047. s->llviddsp.add_bytes(p->data[0] + p->linesize[0] * (y + y_offset),
  1048. p->data[0] + p->linesize[0] * (y + y_offset) +
  1049. fake_ystride, 4 * width);
  1050. }
  1051. }
  1052. }
  1053. // just 1 large slice as this is not possible in reverse order
  1054. draw_slice(s, p, height);
  1055. break;
  1056. default:
  1057. av_log(avctx, AV_LOG_ERROR,
  1058. "prediction type not supported!\n");
  1059. }
  1060. } else {
  1061. av_log(avctx, AV_LOG_ERROR,
  1062. "BGR24 output is not implemented yet\n");
  1063. return AVERROR_PATCHWELCOME;
  1064. }
  1065. }
  1066. return 0;
  1067. }
/**
 * Decode one Huffyuv/FFVHuff/HYMT packet into a frame.
 *
 * The packet is byte-swapped into a padded scratch buffer, optional
 * per-frame Huffman tables are read (context mode), and the payload is
 * decoded as one slice — or, for HYMT packets carrying a trailing slice
 * table, as several independent slices stored bottom-up.
 *
 * @param avctx     codec context (priv_data is the HYuvContext)
 * @param data      output AVFrame
 * @param got_frame set to 1 when a frame has been produced
 * @param avpkt     input packet
 * @return number of bytes consumed (bit position rounded up to a 32-bit
 *         word, plus the table size) on success, a negative AVERROR code
 *         on failure
 */
static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                        AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    HYuvContext *s = avctx->priv_data;
    const int width  = s->width;
    const int height = s->height;
    ThreadFrame frame = { .f = data };
    AVFrame *const p = data;
    int slice, table_size = 0, ret, nb_slices;
    unsigned slices_info_offset;
    int slice_height;

    /* Reject packets too small to hold even 1 bit per pixel. */
    if (buf_size < (width * height + 7)/8)
        return AVERROR_INVALIDDATA;

    /* Scratch buffer with AV_INPUT_BUFFER_PADDING_SIZE padding so the
     * unchecked bitstream reader cannot overread past the payload. */
    av_fast_padded_malloc(&s->bitstream_buffer,
                          &s->bitstream_buffer_size,
                          buf_size);
    if (!s->bitstream_buffer)
        return AVERROR(ENOMEM);

    /* The stream is stored as big-endian 32-bit words; swap the whole
     * packet up front.  A trailing partial word (buf_size % 4) is left
     * untouched and covered by the padding. */
    s->bdsp.bswap_buf((uint32_t *) s->bitstream_buffer,
                      (const uint32_t *) buf, buf_size / 4);

    if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
        return ret;

    /* In context mode the Huffman tables are transmitted at the start of
     * every frame; table_size is the byte offset where pixel data begins. */
    if (s->context) {
        table_size = read_huffman_tables(s, s->bitstream_buffer, buf_size);
        if (table_size < 0)
            return table_size;
    }

    /* Guard against overflow when the remaining size is later used in
     * bit-count arithmetic. */
    if ((unsigned) (buf_size - table_size) >= INT_MAX / 8)
        return AVERROR_INVALIDDATA;

    s->last_slice_end = 0;

    /* HYMT may append a slice directory at the end of the packet:
     * ... [slice table at slices_info_offset] [nb_slices] [slice_height]
     * [zero marker] [slices_info_offset], each field 4 bytes LE. */
    if (avctx->codec_id == AV_CODEC_ID_HYMT &&
        (buf_size > 32 && AV_RL32(avpkt->data + buf_size - 16) == 0)) {
        slices_info_offset = AV_RL32(avpkt->data + buf_size - 4);
        slice_height       = AV_RL32(avpkt->data + buf_size - 8);
        nb_slices          = AV_RL32(avpkt->data + buf_size - 12);
        /* Validate the directory: it must fit before the 16-byte footer,
         * vertical chroma subsampling is unsupported for sliced decoding,
         * and the slices must not exceed the frame height.  64-bit
         * arithmetic avoids overflow on attacker-controlled values. */
        if (nb_slices * 8LL + slices_info_offset > buf_size - 16 ||
            s->chroma_v_shift ||
            slice_height <= 0 || nb_slices * (uint64_t)slice_height > height)
            return AVERROR_INVALIDDATA;
    } else {
        /* Normal case: the whole packet is a single full-height slice. */
        slice_height = height;
        nb_slices    = 1;
    }

    for (slice = 0; slice < nb_slices; slice++) {
        int y_offset, slice_offset, slice_size;

        if (nb_slices > 1) {
            /* Each directory entry is (offset, size); offsets must be
             * 32-bit aligned because decoding works on swapped words. */
            slice_offset = AV_RL32(avpkt->data + slices_info_offset + slice * 8);
            slice_size   = AV_RL32(avpkt->data + slices_info_offset + slice * 8 + 4);
            if (slice_offset < 0 || slice_size <= 0 || (slice_offset&3) ||
                slice_offset + (int64_t)slice_size > buf_size)
                return AVERROR_INVALIDDATA;

            /* Slices are stored bottom-up within the frame. */
            y_offset = height - (slice + 1) * slice_height;
            s->bdsp.bswap_buf((uint32_t *)s->bitstream_buffer,
                              (const uint32_t *)(buf + slice_offset), slice_size / 4);
        } else {
            y_offset = 0;
            slice_offset = 0;
            slice_size = buf_size;
        }

        ret = decode_slice(avctx, p, slice_height, slice_size, y_offset, table_size);
        emms_c();
        if (ret < 0)
            return ret;
    }
    *got_frame = 1;

    return (get_bits_count(&s->gb) + 31) / 32 * 4 + table_size;
}
  1137. AVCodec ff_huffyuv_decoder = {
  1138. .name = "huffyuv",
  1139. .long_name = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
  1140. .type = AVMEDIA_TYPE_VIDEO,
  1141. .id = AV_CODEC_ID_HUFFYUV,
  1142. .priv_data_size = sizeof(HYuvContext),
  1143. .init = decode_init,
  1144. .close = decode_end,
  1145. .decode = decode_frame,
  1146. .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
  1147. AV_CODEC_CAP_FRAME_THREADS,
  1148. };
  1149. #if CONFIG_FFVHUFF_DECODER
  1150. AVCodec ff_ffvhuff_decoder = {
  1151. .name = "ffvhuff",
  1152. .long_name = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
  1153. .type = AVMEDIA_TYPE_VIDEO,
  1154. .id = AV_CODEC_ID_FFVHUFF,
  1155. .priv_data_size = sizeof(HYuvContext),
  1156. .init = decode_init,
  1157. .close = decode_end,
  1158. .decode = decode_frame,
  1159. .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
  1160. AV_CODEC_CAP_FRAME_THREADS,
  1161. };
  1162. #endif /* CONFIG_FFVHUFF_DECODER */
  1163. #if CONFIG_HYMT_DECODER
  1164. AVCodec ff_hymt_decoder = {
  1165. .name = "hymt",
  1166. .long_name = NULL_IF_CONFIG_SMALL("HuffYUV MT"),
  1167. .type = AVMEDIA_TYPE_VIDEO,
  1168. .id = AV_CODEC_ID_HYMT,
  1169. .priv_data_size = sizeof(HYuvContext),
  1170. .init = decode_init,
  1171. .close = decode_end,
  1172. .decode = decode_frame,
  1173. .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DRAW_HORIZ_BAND |
  1174. AV_CODEC_CAP_FRAME_THREADS,
  1175. };
  1176. #endif /* CONFIG_HYMT_DECODER */