You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1316 lines
49KB

  1. /*
  2. * VC-1 and WMV3 decoder
  3. * Copyright (c) 2011 Mashiat Sarker Shakkhar
  4. * Copyright (c) 2006-2007 Konstantin Shishkov
  5. * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
  6. *
  7. * This file is part of FFmpeg.
  8. *
  9. * FFmpeg is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * FFmpeg is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with FFmpeg; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. /**
  24. * @file
  25. * VC-1 and WMV3 decoder
  26. */
  27. #include "avcodec.h"
  28. #include "blockdsp.h"
  29. #include "get_bits.h"
  30. #include "hwaccel.h"
  31. #include "internal.h"
  32. #include "mpeg_er.h"
  33. #include "mpegvideo.h"
  34. #include "msmpeg4.h"
  35. #include "msmpeg4data.h"
  36. #include "profiles.h"
  37. #include "vc1.h"
  38. #include "vc1data.h"
  39. #include "libavutil/avassert.h"
  40. #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
/**
 * Per-frame sprite parameters parsed from a WMV3IMAGE/VC1IMAGE bitstream,
 * filled by vc1_parse_sprites() and consumed by vc1_draw_sprites().
 */
typedef struct SpriteData {
    /**
     * Transform coefficients for both sprites in 16.16 fixed point format,
     * in the order they appear in the bitstream:
     * x scale
     * rotation 1 (unused)
     * x offset
     * rotation 2 (unused)
     * y scale
     * y offset
     * alpha
     */
    int coefs[2][7];

    int effect_type, effect_flag;
    int effect_pcount1, effect_pcount2;         ///< amount of effect parameters stored in effect_params
    int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
} SpriteData;
  58. static inline int get_fp_val(GetBitContext* gb)
  59. {
  60. return (get_bits_long(gb, 30) - (1 << 29)) << 1;
  61. }
  62. static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
  63. {
  64. c[1] = c[3] = 0;
  65. switch (get_bits(gb, 2)) {
  66. case 0:
  67. c[0] = 1 << 16;
  68. c[2] = get_fp_val(gb);
  69. c[4] = 1 << 16;
  70. break;
  71. case 1:
  72. c[0] = c[4] = get_fp_val(gb);
  73. c[2] = get_fp_val(gb);
  74. break;
  75. case 2:
  76. c[0] = get_fp_val(gb);
  77. c[2] = get_fp_val(gb);
  78. c[4] = get_fp_val(gb);
  79. break;
  80. case 3:
  81. c[0] = get_fp_val(gb);
  82. c[1] = get_fp_val(gb);
  83. c[2] = get_fp_val(gb);
  84. c[3] = get_fp_val(gb);
  85. c[4] = get_fp_val(gb);
  86. break;
  87. }
  88. c[5] = get_fp_val(gb);
  89. if (get_bits1(gb))
  90. c[6] = get_fp_val(gb);
  91. else
  92. c[6] = 1 << 16;
  93. }
/**
 * Parse the sprite header of a WMV Image frame: one transform per active
 * sprite, an optional effect record, and a trailing effect flag.
 *
 * Fills @p sd and logs the decoded values at debug level.
 *
 * @return 0 on success, AVERROR_INVALIDDATA on too many effect parameters
 *         or on bitstream overrun.
 */
static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
{
    AVCodecContext *avctx = v->s.avctx;
    int sprite, i;

    /* one transform per sprite (two_sprites is 0 or 1) */
    for (sprite = 0; sprite <= v->two_sprites; sprite++) {
        vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
        /* rotation coefficients are defined but unsupported */
        if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
            avpriv_request_sample(avctx, "Non-zero rotation coefficients");
        av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
        for (i = 0; i < 7; i++)
            /* print 16.16 values as integer.milli */
            av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
                   sd->coefs[sprite][i] / (1<<16),
                   (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
        av_log(avctx, AV_LOG_DEBUG, "\n");
    }

    skip_bits(gb, 2);
    /* optional effect record (assignment intended: 0 means "no effect") */
    if (sd->effect_type = get_bits_long(gb, 30)) {
        switch (sd->effect_pcount1 = get_bits(gb, 4)) {
        case 7:
            /* 7 parameters: one full transform */
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            break;
        case 14:
            /* 14 parameters: two full transforms back to back */
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
            break;
        default:
            /* otherwise raw 16.16 values (pcount1 <= 15 fits the array) */
            for (i = 0; i < sd->effect_pcount1; i++)
                sd->effect_params1[i] = get_fp_val(gb);
        }
        if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
            // effect 13 is simple alpha blending and matches the opacity above
            av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
            for (i = 0; i < sd->effect_pcount1; i++)
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params1[i] / (1 << 16),
                       (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");
        }

        sd->effect_pcount2 = get_bits(gb, 16);
        /* effect_params2 holds at most 10 entries */
        if (sd->effect_pcount2 > 10) {
            av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
            return AVERROR_INVALIDDATA;
        } else if (sd->effect_pcount2) {
            i = -1;
            av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
            while (++i < sd->effect_pcount2) {
                sd->effect_params2[i] = get_fp_val(gb);
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params2[i] / (1 << 16),
                       (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
            }
            av_log(avctx, AV_LOG_DEBUG, "\n");
        }
    }
    /* assignment intended: flag is read and reported */
    if (sd->effect_flag = get_bits1(gb))
        av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");

    /* WMV3IMAGE streams are allowed to overread up to 64 bits */
    if (get_bits_count(gb) >= gb->size_in_bits +
        (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
        av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
        return AVERROR_INVALIDDATA;
    }
    if (get_bits_count(gb) < gb->size_in_bits - 8)
        av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");

    return 0;
}
/**
 * Composite the sprite(s) into v->sprite_output_frame.
 *
 * For every output row, the needed source row(s) are horizontally scaled
 * with vc1dsp.sprite_h into v->sr_rows (with a two-entry row cache per
 * sprite), then vertically interpolated / alpha-blended with the
 * vc1dsp.sprite_v_* routines. Sprite 0 is read from the current picture,
 * sprite 1 (when v->two_sprites) from the last picture; the blend factor
 * is the 16.16 alpha coefficient from the bitstream.
 */
static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
{
    int i, plane, row, sprite;
    /* source line numbers currently held in v->sr_rows[sprite][0/1]; -1 = none */
    int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
    uint8_t* src_h[2][2];
    int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
    int ysub[2];
    MpegEncContext *s = &v->s;

    for (i = 0; i <= v->two_sprites; i++) {
        /* note: '-' binds tighter than '<<', so this clips to
           (sprite_width - 1) << 16, the maximum 16.16 offset */
        xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
        xadv[i] = sd->coefs[i][0];
        /* clamp the x step so sampling never runs past the sprite edge */
        if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
            xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);

        yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
        yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
    }
    alpha = av_clip_uint16(sd->coefs[1][6]);

    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
        int width = v->output_width>>!!plane; /* chroma planes are half width */

        for (row = 0; row < v->output_height>>!!plane; row++) {
            uint8_t *dst = v->sprite_output_frame->data[plane] +
                           v->sprite_output_frame->linesize[plane] * row;

            for (sprite = 0; sprite <= v->two_sprites; sprite++) {
                uint8_t *iplane = s->current_picture.f->data[plane];
                int      iline  = s->current_picture.f->linesize[plane];
                int      ycoord = yoff[sprite] + yadv[sprite] * row;
                int      yline  = ycoord >> 16;       /* integer source line */
                int      next_line;
                ysub[sprite] = ycoord & 0xFFFF;       /* vertical sub-pixel phase */
                if (sprite) {
                    /* the second sprite is taken from the previous picture */
                    iplane = s->last_picture.f->data[plane];
                    iline  = s->last_picture.f->linesize[plane];
                }
                next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;

                if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
                    /* no horizontal scaling needed: point directly at the source */
                    src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
                    if (ysub[sprite])
                        src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
                } else {
                    /* horizontally scale the required row(s) into sr_rows,
                       reusing cached rows when possible */
                    if (sr_cache[sprite][0] != yline) {
                        if (sr_cache[sprite][1] == yline) {
                            /* previous row's second line is this row's first */
                            FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
                            FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
                        } else {
                            v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
                            sr_cache[sprite][0] = yline;
                        }
                    }
                    if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
                        v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
                                           iplane + next_line, xoff[sprite],
                                           xadv[sprite], width);
                        sr_cache[sprite][1] = yline + 1;
                    }
                    src_h[sprite][0] = v->sr_rows[sprite][0];
                    src_h[sprite][1] = v->sr_rows[sprite][1];
                }
            }

            if (!v->two_sprites) {
                if (ysub[0]) {
                    v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
                } else {
                    memcpy(dst, src_h[0][0], width);
                }
            } else {
                /* pick the vertical blend variant matching which sprites
                   need sub-pixel interpolation */
                if (ysub[0] && ysub[1]) {
                    v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], src_h[1][1], ysub[1], alpha, width);
                } else if (ysub[0]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], alpha, width);
                } else if (ysub[1]) {
                    /* sprites swapped, so the blend weight is inverted */
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
                                                       src_h[0][0], (1<<16)-1-alpha, width);
                } else {
                    v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);
                }
            }
        }

        if (!plane) {
            /* halve the offsets once before the half-size chroma planes */
            for (i = 0; i <= v->two_sprites; i++) {
                xoff[i] >>= 1;
                yoff[i] >>= 1;
            }
        }
    }
}
  246. static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
  247. {
  248. int ret;
  249. MpegEncContext *s = &v->s;
  250. AVCodecContext *avctx = s->avctx;
  251. SpriteData sd;
  252. memset(&sd, 0, sizeof(sd));
  253. ret = vc1_parse_sprites(v, gb, &sd);
  254. if (ret < 0)
  255. return ret;
  256. if (!s->current_picture.f || !s->current_picture.f->data[0]) {
  257. av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
  258. return AVERROR_UNKNOWN;
  259. }
  260. if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
  261. av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
  262. v->two_sprites = 0;
  263. }
  264. av_frame_unref(v->sprite_output_frame);
  265. if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)
  266. return ret;
  267. vc1_draw_sprites(v, &sd);
  268. return 0;
  269. }
  270. static void vc1_sprite_flush(AVCodecContext *avctx)
  271. {
  272. VC1Context *v = avctx->priv_data;
  273. MpegEncContext *s = &v->s;
  274. AVFrame *f = s->current_picture.f;
  275. int plane, i;
  276. /* Windows Media Image codecs have a convergence interval of two keyframes.
  277. Since we can't enforce it, clear to black the missing sprite. This is
  278. wrong but it looks better than doing nothing. */
  279. if (f && f->data[0])
  280. for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
  281. for (i = 0; i < v->sprite_height>>!!plane; i++)
  282. memset(f->data[plane] + i * f->linesize[plane],
  283. plane ? 128 : 0, f->linesize[plane]);
  284. }
  285. #endif
  286. av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
  287. {
  288. MpegEncContext *s = &v->s;
  289. int i, ret = AVERROR(ENOMEM);
  290. int mb_height = FFALIGN(s->mb_height, 2);
  291. /* Allocate mb bitplanes */
  292. v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
  293. v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
  294. v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
  295. v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
  296. v->acpred_plane = av_malloc (s->mb_stride * mb_height);
  297. v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
  298. if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
  299. !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)
  300. goto error;
  301. v->n_allocated_blks = s->mb_width + 2;
  302. v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
  303. v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 3 * s->mb_stride);
  304. if (!v->block || !v->cbp_base)
  305. goto error;
  306. v->cbp = v->cbp_base + 2 * s->mb_stride;
  307. v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 3 * s->mb_stride);
  308. if (!v->ttblk_base)
  309. goto error;
  310. v->ttblk = v->ttblk_base + 2 * s->mb_stride;
  311. v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 3 * s->mb_stride);
  312. if (!v->is_intra_base)
  313. goto error;
  314. v->is_intra = v->is_intra_base + 2 * s->mb_stride;
  315. v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 3 * s->mb_stride);
  316. if (!v->luma_mv_base)
  317. goto error;
  318. v->luma_mv = v->luma_mv_base + 2 * s->mb_stride;
  319. /* allocate block type info in that way so it could be used with s->block_index[] */
  320. v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
  321. if (!v->mb_type_base)
  322. goto error;
  323. v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
  324. v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
  325. v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);
  326. /* allocate memory to store block level MV info */
  327. v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
  328. if (!v->blk_mv_type_base)
  329. goto error;
  330. v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
  331. v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
  332. if (!v->mv_f_base)
  333. goto error;
  334. v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
  335. v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
  336. v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
  337. if (!v->mv_f_next_base)
  338. goto error;
  339. v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
  340. v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
  341. if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
  342. for (i = 0; i < 4; i++)
  343. if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))
  344. return AVERROR(ENOMEM);
  345. }
  346. ret = ff_intrax8_common_init(s->avctx, &v->x8, &s->idsp,
  347. s->block, s->block_last_index,
  348. s->mb_width, s->mb_height);
  349. if (ret < 0)
  350. goto error;
  351. return 0;
  352. error:
  353. ff_vc1_decode_end(s->avctx);
  354. return ret;
  355. }
  356. av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
  357. {
  358. int i;
  359. for (i = 0; i < 64; i++) {
  360. #define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
  361. v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
  362. v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
  363. v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
  364. v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
  365. v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
  366. }
  367. v->left_blk_sh = 0;
  368. v->top_blk_sh = 3;
  369. }
/** Initialize a VC1/WMV3 decoder
 *
 * Parses the codec extradata (a WMV3 sequence header, or the startcode-
 * delimited sequence header + entry point of advanced-profile VC-1),
 * selects the pixel format, allocates the decoder tables and, for the
 * image (sprite) codecs, records the sprite vs. output dimensions.
 *
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 * @todo TODO: Decipher remaining bits in extra_data
 */
static av_cold int vc1_decode_init(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    GetBitContext gb;
    int ret;

    /* save the container output size for WMImage */
    v->output_width  = avctx->width;
    v->output_height = avctx->height;

    if (!avctx->extradata_size || !avctx->extradata)
        return AVERROR_INVALIDDATA;
    v->s.avctx = avctx;

    if ((ret = ff_vc1_init_common(v)) < 0)
        return ret;

    if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
        int count = 0;

        // looks like WMV3 has a sequence header stored in the extradata
        // advanced sequence header may be before the first frame
        // the last byte of the extradata is a version number, 1 for the
        // samples we can decode
        init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);

        if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)
            return ret;

        if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE && !v->res_sprite) {
            avpriv_request_sample(avctx, "Non sprite WMV3IMAGE");
            return AVERROR_PATCHWELCOME;
        }

        /* report any unparsed trailing bits, or a header overread */
        count = avctx->extradata_size*8 - get_bits_count(&gb);
        if (count > 0) {
            av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
                   count, get_bits_long(&gb, FFMIN(count, 32)));
        } else if (count < 0) {
            av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
        }
    } else { // VC1/WVC1/WVP2
        const uint8_t *start = avctx->extradata;
        uint8_t *end = avctx->extradata + avctx->extradata_size;
        const uint8_t *next;
        int size, buf2_size;
        uint8_t *buf2 = NULL;
        int seq_initialized = 0, ep_initialized = 0;

        if (avctx->extradata_size < 16) {
            av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
            return AVERROR_INVALIDDATA;
        }

        /* scratch buffer for startcode-unescaped header payloads */
        buf2 = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
        if (!buf2)
            return AVERROR(ENOMEM);

        start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
        next  = start;
        /* walk the startcode-delimited units; each payload is unescaped
           into buf2 before parsing */
        for (; next < end; start = next) {
            next = find_next_marker(start + 4, end);
            size = next - start - 4;
            if (size <= 0)
                continue;
            buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
            init_get_bits(&gb, buf2, buf2_size * 8);
            switch (AV_RB32(start)) {
            case VC1_CODE_SEQHDR:
                if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
                    av_free(buf2);
                    return ret;
                }
                seq_initialized = 1;
                break;
            case VC1_CODE_ENTRYPOINT:
                if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {
                    av_free(buf2);
                    return ret;
                }
                ep_initialized = 1;
                break;
            }
        }
        av_free(buf2);

        /* advanced profile needs both a sequence header and an entry point */
        if (!seq_initialized || !ep_initialized) {
            av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
            return AVERROR_INVALIDDATA;
        }
        v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
    }

    avctx->profile = v->profile;
    if (v->profile == PROFILE_ADVANCED)
        avctx->level = v->level;

    if (!CONFIG_GRAY || !(avctx->flags & AV_CODEC_FLAG_GRAY))
        avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
    else {
        /* gray-only decode: skip format negotiation entirely */
        avctx->pix_fmt = AV_PIX_FMT_GRAY8;
        if (avctx->color_range == AVCOL_RANGE_UNSPECIFIED)
            avctx->color_range = AVCOL_RANGE_MPEG;
    }

    // ensure static VLC tables are initialized
    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
        return ret;
    if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
        return ret;
    // Hack to ensure the above functions will be called
    // again once we know all necessary settings.
    // That this is necessary might indicate a bug.
    ff_vc1_decode_end(avctx);

    ff_blockdsp_init(&s->bdsp, avctx);
    ff_h264chroma_init(&v->h264chroma, 8);
    ff_qpeldsp_init(&s->qdsp);

    // Must happen after calling ff_vc1_decode_end
    // to avoid de-allocating the sprite_output_frame
    v->sprite_output_frame = av_frame_alloc();
    if (!v->sprite_output_frame)
        return AVERROR(ENOMEM);

    avctx->has_b_frames = !!avctx->max_b_frames;

    /* only propagate the colorimetry values this decoder recognizes */
    if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
        avctx->color_primaries = v->color_prim;
    if (v->transfer_char == 1 || v->transfer_char == 7)
        avctx->color_trc = v->transfer_char;
    if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
        avctx->colorspace = v->matrix_coef;

    s->mb_width  = (avctx->coded_width  + 15) >> 4;
    s->mb_height = (avctx->coded_height + 15) >> 4;

    if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
        ff_vc1_init_transposed_scantables(v);
    } else {
        memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);
        v->left_blk_sh = 3;
        v->top_blk_sh  = 0;
    }

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        /* the image codecs decode into the (larger) sprite size and
           composite down to the container's output size */
        v->sprite_width  = avctx->coded_width;
        v->sprite_height = avctx->coded_height;

        avctx->coded_width  = avctx->width  = v->output_width;
        avctx->coded_height = avctx->height = v->output_height;

        // prevent 16.16 overflows
        if (v->sprite_width  > 1 << 14 ||
            v->sprite_height > 1 << 14 ||
            v->output_width  > 1 << 14 ||
            v->output_height > 1 << 14) {
            ret = AVERROR_INVALIDDATA;
            goto error;
        }

        if ((v->sprite_width&1) || (v->sprite_height&1)) {
            avpriv_request_sample(avctx, "odd sprites support");
            ret = AVERROR_PATCHWELCOME;
            goto error;
        }
    }
    return 0;

error:
    av_frame_free(&v->sprite_output_frame);
    return ret;
}
/** Close a VC1/WMV3 decoder
 *
 * Frees everything allocated by ff_vc1_decode_init_alloc_tables() and the
 * sprite machinery. Safe to call on a partially initialized context: all
 * pointers are freed with av_freep()/av_frame_free(), which tolerate NULL.
 *
 * @warning Initial try at using MpegEncContext stuff
 */
av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    int i;

    av_frame_free(&v->sprite_output_frame);

    /* sprite scaler row buffers (image codecs only; NULL otherwise) */
    for (i = 0; i < 4; i++)
        av_freep(&v->sr_rows[i >> 1][i & 1]);
    av_freep(&v->hrd_rate);
    av_freep(&v->hrd_buffer);

    ff_mpv_common_end(&v->s);

    /* tables from ff_vc1_decode_init_alloc_tables() */
    av_freep(&v->mv_type_mb_plane);
    av_freep(&v->direct_mb_plane);
    av_freep(&v->forward_mb_plane);
    av_freep(&v->fieldtx_plane);
    av_freep(&v->acpred_plane);
    av_freep(&v->over_flags_plane);
    av_freep(&v->mb_type_base);
    av_freep(&v->blk_mv_type_base);
    av_freep(&v->mv_f_base);
    av_freep(&v->mv_f_next_base);
    av_freep(&v->block);
    av_freep(&v->cbp_base);
    av_freep(&v->ttblk_base);
    av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
    av_freep(&v->luma_mv_base);

    ff_intrax8_common_end(&v->x8);
    return 0;
}
  553. /** Decode a VC1/WMV3 frame
  554. * @todo TODO: Handle VC-1 IDUs (Transport level?)
  555. */
  556. static int vc1_decode_frame(AVCodecContext *avctx, void *data,
  557. int *got_frame, AVPacket *avpkt)
  558. {
  559. const uint8_t *buf = avpkt->data;
  560. int buf_size = avpkt->size, n_slices = 0, i, ret;
  561. VC1Context *v = avctx->priv_data;
  562. MpegEncContext *s = &v->s;
  563. AVFrame *pict = data;
  564. uint8_t *buf2 = NULL;
  565. const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
  566. int mb_height, n_slices1=-1;
  567. struct {
  568. uint8_t *buf;
  569. GetBitContext gb;
  570. int mby_start;
  571. const uint8_t *rawbuf;
  572. int raw_size;
  573. } *slices = NULL, *tmp;
  574. v->second_field = 0;
  575. if(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
  576. s->low_delay = 1;
  577. /* no supplementary picture */
  578. if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
  579. /* special case for last picture */
  580. if (s->low_delay == 0 && s->next_picture_ptr) {
  581. if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
  582. return ret;
  583. s->next_picture_ptr = NULL;
  584. *got_frame = 1;
  585. }
  586. return buf_size;
  587. }
  588. //for advanced profile we may need to parse and unescape data
  589. if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
  590. int buf_size2 = 0;
  591. buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
  592. if (!buf2)
  593. return AVERROR(ENOMEM);
  594. if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
  595. const uint8_t *start, *end, *next;
  596. int size;
  597. next = buf;
  598. for (start = buf, end = buf + buf_size; next < end; start = next) {
  599. next = find_next_marker(start + 4, end);
  600. size = next - start - 4;
  601. if (size <= 0) continue;
  602. switch (AV_RB32(start)) {
  603. case VC1_CODE_FRAME:
  604. if (avctx->hwaccel)
  605. buf_start = start;
  606. buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
  607. break;
  608. case VC1_CODE_FIELD: {
  609. int buf_size3;
  610. if (avctx->hwaccel)
  611. buf_start_second_field = start;
  612. tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
  613. if (!tmp) {
  614. ret = AVERROR(ENOMEM);
  615. goto err;
  616. }
  617. slices = tmp;
  618. slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
  619. if (!slices[n_slices].buf) {
  620. ret = AVERROR(ENOMEM);
  621. goto err;
  622. }
  623. buf_size3 = vc1_unescape_buffer(start + 4, size,
  624. slices[n_slices].buf);
  625. init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
  626. buf_size3 << 3);
  627. slices[n_slices].mby_start = avctx->coded_height + 31 >> 5;
  628. slices[n_slices].rawbuf = start;
  629. slices[n_slices].raw_size = size + 4;
  630. n_slices1 = n_slices - 1; // index of the last slice of the first field
  631. n_slices++;
  632. break;
  633. }
  634. case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
  635. buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
  636. init_get_bits(&s->gb, buf2, buf_size2 * 8);
  637. ff_vc1_decode_entry_point(avctx, v, &s->gb);
  638. break;
  639. case VC1_CODE_SLICE: {
  640. int buf_size3;
  641. tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
  642. if (!tmp) {
  643. ret = AVERROR(ENOMEM);
  644. goto err;
  645. }
  646. slices = tmp;
  647. slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
  648. if (!slices[n_slices].buf) {
  649. ret = AVERROR(ENOMEM);
  650. goto err;
  651. }
  652. buf_size3 = vc1_unescape_buffer(start + 4, size,
  653. slices[n_slices].buf);
  654. init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
  655. buf_size3 << 3);
  656. slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
  657. slices[n_slices].rawbuf = start;
  658. slices[n_slices].raw_size = size + 4;
  659. n_slices++;
  660. break;
  661. }
  662. }
  663. }
  664. } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
  665. const uint8_t *divider;
  666. int buf_size3;
  667. divider = find_next_marker(buf, buf + buf_size);
  668. if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
  669. av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
  670. ret = AVERROR_INVALIDDATA;
  671. goto err;
  672. } else { // found field marker, unescape second field
  673. if (avctx->hwaccel)
  674. buf_start_second_field = divider;
  675. tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
  676. if (!tmp) {
  677. ret = AVERROR(ENOMEM);
  678. goto err;
  679. }
  680. slices = tmp;
  681. slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
  682. if (!slices[n_slices].buf) {
  683. ret = AVERROR(ENOMEM);
  684. goto err;
  685. }
  686. buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
  687. init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
  688. buf_size3 << 3);
  689. slices[n_slices].mby_start = s->mb_height + 1 >> 1;
  690. slices[n_slices].rawbuf = divider;
  691. slices[n_slices].raw_size = buf + buf_size - divider;
  692. n_slices1 = n_slices - 1;
  693. n_slices++;
  694. }
  695. buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
  696. } else {
  697. buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
  698. }
  699. init_get_bits(&s->gb, buf2, buf_size2*8);
  700. } else
  701. init_get_bits(&s->gb, buf, buf_size*8);
  702. if (v->res_sprite) {
  703. v->new_sprite = !get_bits1(&s->gb);
  704. v->two_sprites = get_bits1(&s->gb);
  705. /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
  706. we're using the sprite compositor. These are intentionally kept separate
  707. so you can get the raw sprites by using the wmv3 decoder for WMVP or
  708. the vc1 one for WVP2 */
  709. if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
  710. if (v->new_sprite) {
  711. // switch AVCodecContext parameters to those of the sprites
  712. avctx->width = avctx->coded_width = v->sprite_width;
  713. avctx->height = avctx->coded_height = v->sprite_height;
  714. } else {
  715. goto image;
  716. }
  717. }
  718. }
  719. if (s->context_initialized &&
  720. (s->width != avctx->coded_width ||
  721. s->height != avctx->coded_height)) {
  722. ff_vc1_decode_end(avctx);
  723. }
  724. if (!s->context_initialized) {
  725. if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
  726. goto err;
  727. if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0) {
  728. ff_mpv_common_end(s);
  729. goto err;
  730. }
  731. s->low_delay = !avctx->has_b_frames || v->res_sprite;
  732. if (v->profile == PROFILE_ADVANCED) {
  733. if(avctx->coded_width<=1 || avctx->coded_height<=1) {
  734. ret = AVERROR_INVALIDDATA;
  735. goto err;
  736. }
  737. s->h_edge_pos = avctx->coded_width;
  738. s->v_edge_pos = avctx->coded_height;
  739. }
  740. }
  741. // do parse frame header
  742. v->pic_header_flag = 0;
  743. v->first_pic_header_flag = 1;
  744. if (v->profile < PROFILE_ADVANCED) {
  745. if ((ret = ff_vc1_parse_frame_header(v, &s->gb)) < 0) {
  746. goto err;
  747. }
  748. } else {
  749. if ((ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
  750. goto err;
  751. }
  752. }
  753. v->first_pic_header_flag = 0;
  754. if (avctx->debug & FF_DEBUG_PICT_INFO)
  755. av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));
  756. if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
  757. && s->pict_type != AV_PICTURE_TYPE_I) {
  758. av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
  759. ret = AVERROR_INVALIDDATA;
  760. goto err;
  761. }
  762. if ((s->mb_height >> v->field_mode) == 0) {
  763. av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
  764. ret = AVERROR_INVALIDDATA;
  765. goto err;
  766. }
  767. // for skipping the frame
  768. s->current_picture.f->pict_type = s->pict_type;
  769. s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
  770. /* skip B-frames if we don't have reference frames */
  771. if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
  772. av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
  773. goto end;
  774. }
  775. if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
  776. (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
  777. avctx->skip_frame >= AVDISCARD_ALL) {
  778. goto end;
  779. }
  780. if (s->next_p_frame_damaged) {
  781. if (s->pict_type == AV_PICTURE_TYPE_B)
  782. goto end;
  783. else
  784. s->next_p_frame_damaged = 0;
  785. }
  786. if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
  787. goto err;
  788. }
  789. v->s.current_picture_ptr->field_picture = v->field_mode;
  790. v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
  791. v->s.current_picture_ptr->f->top_field_first = v->tff;
  792. // process pulldown flags
  793. s->current_picture_ptr->f->repeat_pict = 0;
  794. // Pulldown flags are only valid when 'broadcast' has been set.
  795. // So ticks_per_frame will be 2
  796. if (v->rff) {
  797. // repeat field
  798. s->current_picture_ptr->f->repeat_pict = 1;
  799. } else if (v->rptfrm) {
  800. // repeat frames
  801. s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
  802. }
  803. s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
  804. s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;
  805. if (avctx->hwaccel) {
  806. s->mb_y = 0;
  807. if (v->field_mode && buf_start_second_field) {
  808. // decode first field
  809. s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
  810. if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
  811. goto err;
  812. if (n_slices1 == -1) {
  813. // no slices, decode the field as-is
  814. if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
  815. goto err;
  816. } else {
  817. if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
  818. goto err;
  819. for (i = 0 ; i < n_slices1 + 1; i++) {
  820. s->gb = slices[i].gb;
  821. s->mb_y = slices[i].mby_start;
  822. v->pic_header_flag = get_bits1(&s->gb);
  823. if (v->pic_header_flag) {
  824. if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
  825. av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
  826. ret = AVERROR_INVALIDDATA;
  827. if (avctx->err_recognition & AV_EF_EXPLODE)
  828. goto err;
  829. continue;
  830. }
  831. }
  832. if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
  833. goto err;
  834. }
  835. }
  836. if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
  837. goto err;
  838. // decode second field
  839. s->gb = slices[n_slices1 + 1].gb;
  840. s->mb_y = slices[n_slices1 + 1].mby_start;
  841. s->picture_structure = PICT_TOP_FIELD + v->tff;
  842. v->second_field = 1;
  843. v->pic_header_flag = 0;
  844. if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
  845. av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed");
  846. ret = AVERROR_INVALIDDATA;
  847. goto err;
  848. }
  849. v->s.current_picture_ptr->f->pict_type = v->s.pict_type;
  850. if ((ret = avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
  851. goto err;
  852. if (n_slices - n_slices1 == 2) {
  853. // no slices, decode the field as-is
  854. if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
  855. goto err;
  856. } else {
  857. if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, slices[n_slices1 + 2].rawbuf - buf_start_second_field)) < 0)
  858. goto err;
  859. for (i = n_slices1 + 2; i < n_slices; i++) {
  860. s->gb = slices[i].gb;
  861. s->mb_y = slices[i].mby_start;
  862. v->pic_header_flag = get_bits1(&s->gb);
  863. if (v->pic_header_flag) {
  864. if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
  865. av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
  866. ret = AVERROR_INVALIDDATA;
  867. if (avctx->err_recognition & AV_EF_EXPLODE)
  868. goto err;
  869. continue;
  870. }
  871. }
  872. if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
  873. goto err;
  874. }
  875. }
  876. if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
  877. goto err;
  878. } else {
  879. s->picture_structure = PICT_FRAME;
  880. if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
  881. goto err;
  882. if (n_slices == 0) {
  883. // no slices, decode the frame as-is
  884. if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
  885. goto err;
  886. } else {
  887. // decode the frame part as the first slice
  888. if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
  889. goto err;
  890. // and process the slices as additional slices afterwards
  891. for (i = 0 ; i < n_slices; i++) {
  892. s->gb = slices[i].gb;
  893. s->mb_y = slices[i].mby_start;
  894. v->pic_header_flag = get_bits1(&s->gb);
  895. if (v->pic_header_flag) {
  896. if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
  897. av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
  898. ret = AVERROR_INVALIDDATA;
  899. if (avctx->err_recognition & AV_EF_EXPLODE)
  900. goto err;
  901. continue;
  902. }
  903. }
  904. if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
  905. goto err;
  906. }
  907. }
  908. if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
  909. goto err;
  910. }
  911. } else {
  912. int header_ret = 0;
  913. ff_mpeg_er_frame_start(s);
  914. v->bits = FFMIN(buf_size * 8, s->gb.size_in_bits);
  915. v->end_mb_x = s->mb_width;
  916. if (v->field_mode) {
  917. s->current_picture.f->linesize[0] <<= 1;
  918. s->current_picture.f->linesize[1] <<= 1;
  919. s->current_picture.f->linesize[2] <<= 1;
  920. s->linesize <<= 1;
  921. s->uvlinesize <<= 1;
  922. }
  923. mb_height = s->mb_height >> v->field_mode;
  924. av_assert0 (mb_height > 0);
  925. for (i = 0; i <= n_slices; i++) {
  926. if (i > 0 && slices[i - 1].mby_start >= mb_height) {
  927. if (v->field_mode <= 0) {
  928. av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
  929. "picture boundary (%d >= %d)\n", i,
  930. slices[i - 1].mby_start, mb_height);
  931. continue;
  932. }
  933. v->second_field = 1;
  934. av_assert0((s->mb_height & 1) == 0);
  935. v->blocks_off = s->b8_stride * (s->mb_height&~1);
  936. v->mb_off = s->mb_stride * s->mb_height >> 1;
  937. } else {
  938. v->second_field = 0;
  939. v->blocks_off = 0;
  940. v->mb_off = 0;
  941. }
  942. if (i) {
  943. v->pic_header_flag = 0;
  944. if (v->field_mode && i == n_slices1 + 2) {
  945. if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
  946. av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
  947. ret = AVERROR_INVALIDDATA;
  948. if (avctx->err_recognition & AV_EF_EXPLODE)
  949. goto err;
  950. continue;
  951. }
  952. } else if (get_bits1(&s->gb)) {
  953. v->pic_header_flag = 1;
  954. if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
  955. av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
  956. ret = AVERROR_INVALIDDATA;
  957. if (avctx->err_recognition & AV_EF_EXPLODE)
  958. goto err;
  959. continue;
  960. }
  961. }
  962. }
  963. if (header_ret < 0)
  964. continue;
  965. s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
  966. if (!v->field_mode || v->second_field)
  967. s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
  968. else {
  969. if (i >= n_slices) {
  970. av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
  971. continue;
  972. }
  973. s->end_mb_y = (i == n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
  974. }
  975. if (s->end_mb_y <= s->start_mb_y) {
  976. av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
  977. continue;
  978. }
  979. if (((s->pict_type == AV_PICTURE_TYPE_P && !v->p_frame_skipped) ||
  980. (s->pict_type == AV_PICTURE_TYPE_B && !v->bi_type)) &&
  981. !v->cbpcy_vlc) {
  982. av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
  983. continue;
  984. }
  985. ff_vc1_decode_blocks(v);
  986. if (i != n_slices) {
  987. s->gb = slices[i].gb;
  988. v->bits = FFMIN(buf_size * 8, s->gb.size_in_bits);
  989. }
  990. }
  991. if (v->field_mode) {
  992. v->second_field = 0;
  993. s->current_picture.f->linesize[0] >>= 1;
  994. s->current_picture.f->linesize[1] >>= 1;
  995. s->current_picture.f->linesize[2] >>= 1;
  996. s->linesize >>= 1;
  997. s->uvlinesize >>= 1;
  998. if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
  999. FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
  1000. FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
  1001. }
  1002. }
  1003. ff_dlog(s->avctx, "Consumed %i/%i bits\n",
  1004. get_bits_count(&s->gb), s->gb.size_in_bits);
  1005. // if (get_bits_count(&s->gb) > buf_size * 8)
  1006. // return -1;
  1007. if(s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B) {
  1008. ret = AVERROR_INVALIDDATA;
  1009. goto err;
  1010. }
  1011. if (!v->field_mode)
  1012. ff_er_frame_end(&s->er);
  1013. }
  1014. ff_mpv_frame_end(s);
  1015. if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
  1016. image:
  1017. avctx->width = avctx->coded_width = v->output_width;
  1018. avctx->height = avctx->coded_height = v->output_height;
  1019. if (avctx->skip_frame >= AVDISCARD_NONREF)
  1020. goto end;
  1021. #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
  1022. if ((ret = vc1_decode_sprites(v, &s->gb)) < 0)
  1023. goto err;
  1024. #endif
  1025. if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
  1026. goto err;
  1027. *got_frame = 1;
  1028. } else {
  1029. if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
  1030. if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
  1031. goto err;
  1032. ff_print_debug_info(s, s->current_picture_ptr, pict);
  1033. *got_frame = 1;
  1034. } else if (s->last_picture_ptr) {
  1035. if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
  1036. goto err;
  1037. ff_print_debug_info(s, s->last_picture_ptr, pict);
  1038. *got_frame = 1;
  1039. }
  1040. }
  1041. end:
  1042. av_free(buf2);
  1043. for (i = 0; i < n_slices; i++)
  1044. av_free(slices[i].buf);
  1045. av_free(slices);
  1046. return buf_size;
  1047. err:
  1048. av_free(buf2);
  1049. for (i = 0; i < n_slices; i++)
  1050. av_free(slices[i].buf);
  1051. av_free(slices);
  1052. return ret;
  1053. }
/**
 * Pixel formats offered to format negotiation for the VC-1/WMV3 decoders.
 * Hardware-surface formats come first, each gated on its hwaccel being
 * compiled in; AV_PIX_FMT_YUV420P is the software fallback and the list
 * is terminated by AV_PIX_FMT_NONE.
 */
static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
#if CONFIG_VC1_DXVA2_HWACCEL
    AV_PIX_FMT_DXVA2_VLD,
#endif
#if CONFIG_VC1_D3D11VA_HWACCEL
    AV_PIX_FMT_D3D11VA_VLD,
    AV_PIX_FMT_D3D11,
#endif
#if CONFIG_VC1_NVDEC_HWACCEL
    AV_PIX_FMT_CUDA,
#endif
#if CONFIG_VC1_VAAPI_HWACCEL
    AV_PIX_FMT_VAAPI,
#endif
#if CONFIG_VC1_VDPAU_HWACCEL
    AV_PIX_FMT_VDPAU,
#endif
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_NONE
};
/**
 * SMPTE VC-1 decoder registration.
 * Advertises direct rendering (DR1) and frame delay (B-frame reordering),
 * plus every hardware acceleration backend enabled at build time.
 */
AVCodec ff_vc1_decoder = {
    .name           = "vc1",
    .long_name      = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VC1,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .flush          = ff_mpeg_flush,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .pix_fmts       = vc1_hwaccel_pixfmt_list_420,
    /* NULL-terminated list of hwaccel configs, one per enabled backend. */
    .hw_configs     = (const AVCodecHWConfigInternal*[]) {
#if CONFIG_VC1_DXVA2_HWACCEL
                        HWACCEL_DXVA2(vc1),
#endif
#if CONFIG_VC1_D3D11VA_HWACCEL
                        HWACCEL_D3D11VA(vc1),
#endif
#if CONFIG_VC1_D3D11VA2_HWACCEL
                        HWACCEL_D3D11VA2(vc1),
#endif
#if CONFIG_VC1_NVDEC_HWACCEL
                        HWACCEL_NVDEC(vc1),
#endif
#if CONFIG_VC1_VAAPI_HWACCEL
                        HWACCEL_VAAPI(vc1),
#endif
#if CONFIG_VC1_VDPAU_HWACCEL
                        HWACCEL_VDPAU(vc1),
#endif
                        NULL
                    },
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
#if CONFIG_WMV3_DECODER
/**
 * Windows Media Video 9 (WMV3) decoder registration.
 * Shares init/decode/close entry points with the VC-1 decoder and differs
 * only in codec ID and in its own set of hwaccel configs.
 */
AVCodec ff_wmv3_decoder = {
    .name           = "wmv3",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .flush          = ff_mpeg_flush,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .pix_fmts       = vc1_hwaccel_pixfmt_list_420,
    /* NULL-terminated list of hwaccel configs, one per enabled backend. */
    .hw_configs     = (const AVCodecHWConfigInternal*[]) {
#if CONFIG_WMV3_DXVA2_HWACCEL
                        HWACCEL_DXVA2(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA_HWACCEL
                        HWACCEL_D3D11VA(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA2_HWACCEL
                        HWACCEL_D3D11VA2(wmv3),
#endif
#if CONFIG_WMV3_NVDEC_HWACCEL
                        HWACCEL_NVDEC(wmv3),
#endif
#if CONFIG_WMV3_VAAPI_HWACCEL
                        HWACCEL_VAAPI(wmv3),
#endif
#if CONFIG_WMV3_VDPAU_HWACCEL
                        HWACCEL_VDPAU(wmv3),
#endif
                        NULL
                    },
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
#endif
#if CONFIG_WMV3IMAGE_DECODER
/**
 * Windows Media Video 9 Image (sprite) decoder registration.
 * Software-only (YUV420P output, no hwaccel list); uses the sprite flush
 * callback and does not advertise frame delay.
 */
AVCodec ff_wmv3image_decoder = {
    .name           = "wmv3image",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
    .flush          = vc1_sprite_flush,
    .pix_fmts       = (const enum AVPixelFormat[]) {
                        AV_PIX_FMT_YUV420P,
                        AV_PIX_FMT_NONE
                    },
};
#endif
#if CONFIG_VC1IMAGE_DECODER
/**
 * Windows Media Video 9 Image v2 (VC-1 sprite) decoder registration.
 * Same software-only, sprite-flushing setup as the WMV3 image decoder,
 * but with its own codec ID.
 */
AVCodec ff_vc1image_decoder = {
    .name           = "vc1image",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_VC1IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
    .flush          = vc1_sprite_flush,
    .pix_fmts       = (const enum AVPixelFormat[]) {
                        AV_PIX_FMT_YUV420P,
                        AV_PIX_FMT_NONE
                    },
};
#endif