/*
 * Copyright (c) 2002 The Libav Project
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "avcodec.h"
#include "h263.h"
#include "internal.h"
#include "intrax8.h"
#include "mathops.h"
#include "mpegutils.h"
#include "mpegvideo.h"
#include "msmpeg4.h"
#include "msmpeg4data.h"
#include "wmv2.h"
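
/* Parse the per-frame macroblock skip information for P frames. A 2-bit
 * skip type selects between no skipped MBs, one skip flag per MB, or
 * row/column coding where a single bit can mark a whole row or column
 * of macroblocks as skipped. */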
static void parse_mb_skip(Wmv2Context *w)
{
    int mb_x, mb_y;
    MpegEncContext *const s = &w->s;
    uint32_t *const mb_type = s->current_picture_ptr->mb_type;

    w->skip_type = get_bits(&s->gb, 2);
    switch (w->skip_type) {
    case SKIP_TYPE_NONE:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++)
            for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                mb_type[mb_y * s->mb_stride + mb_x] =
                    MB_TYPE_16x16 | MB_TYPE_L0;
        break;
    case SKIP_TYPE_MPEG:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++)
            for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                mb_type[mb_y * s->mb_stride + mb_x] =
                    (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
        break;
    case SKIP_TYPE_ROW:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
            if (get_bits1(&s->gb)) {
                for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
            } else {
                for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
            }
        }
        break;
    case SKIP_TYPE_COL:
        for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
            if (get_bits1(&s->gb)) {
                for (mb_y = 0; mb_y < s->mb_height; mb_y++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
            } else {
                for (mb_y = 0; mb_y < s->mb_height; mb_y++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
            }
        }
        break;
    }
}
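
/* Decode the sequence-level extradata header: frame-rate and bit-rate
 * fields plus the coding flags (mspel, loop filter, ABT, J-type,
 * top-left MV, per-MB RL table) and the 3-bit code that determines the
 * slice height. */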
static int decode_ext_header(Wmv2Context *w)
{
    MpegEncContext *const s = &w->s;
    GetBitContext gb;
    int fps;
    int code;

    if (s->avctx->extradata_size < 4)
        return AVERROR_INVALIDDATA;

    init_get_bits(&gb, s->avctx->extradata, 32);

    fps                 = get_bits(&gb, 5);
    s->bit_rate         = get_bits(&gb, 11) * 1024;
    w->mspel_bit        = get_bits1(&gb);
    s->loop_filter      = get_bits1(&gb);
    w->abt_flag         = get_bits1(&gb);
    w->j_type_bit       = get_bits1(&gb);
    w->top_left_mv_flag = get_bits1(&gb);
    w->per_mb_rl_bit    = get_bits1(&gb);
    code                = get_bits(&gb, 3);

    if (code == 0)
        return AVERROR_INVALIDDATA;

    s->slice_height = s->mb_height / code;

    if (s->avctx->debug & FF_DEBUG_PICT_INFO)
        av_log(s->avctx, AV_LOG_DEBUG,
               "fps:%d, br:%d, qpbit:%d, abt_flag:%d, j_type_bit:%d, "
               "tl_mv_flag:%d, mbrl_bit:%d, code:%d, loop_filter:%d, "
               "slices:%d\n",
               fps, s->bit_rate, w->mspel_bit, w->abt_flag, w->j_type_bit,
               w->top_left_mv_flag, w->per_mb_rl_bit, code, s->loop_filter,
               code);

    return 0;
}
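
/* Decode the first part of the picture header: picture type (I or P)
 * and the 5-bit quantizer scale. The extradata header is parsed once,
 * before the first picture. */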
int ff_wmv2_decode_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int code;

    if (s->picture_number == 0)
        decode_ext_header(w);

    s->pict_type = get_bits1(&s->gb) + 1;
    if (s->pict_type == AV_PICTURE_TYPE_I) {
        code = get_bits(&s->gb, 7);
        av_log(s->avctx, AV_LOG_DEBUG, "I7:%X/\n", code);
    }

    s->chroma_qscale = s->qscale = get_bits(&s->gb, 5);
    if (s->qscale <= 0)
        return AVERROR_INVALIDDATA;

    return 0;
}
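
/* Decode the remainder of the picture header. For I frames this reads
 * the J-type flag and the RL/DC table selection; for P frames it reads
 * the MB skip info, the CBP table index (chosen by quantizer range),
 * the mspel/ABT flags and the RL/DC/MV table indices. J-type frames are
 * decoded entirely by the IntraX8 decoder here, and 1 is returned to
 * signal that no further macroblock decoding is needed. */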
int ff_wmv2_decode_secondary_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;

    if (s->pict_type == AV_PICTURE_TYPE_I) {
        if (w->j_type_bit)
            w->j_type = get_bits1(&s->gb);
        else
            w->j_type = 0; // FIXME check

        if (!w->j_type) {
            if (w->per_mb_rl_bit)
                s->per_mb_rl_table = get_bits1(&s->gb);
            else
                s->per_mb_rl_table = 0;

            if (!s->per_mb_rl_table) {
                s->rl_chroma_table_index = decode012(&s->gb);
                s->rl_table_index        = decode012(&s->gb);
            }

            s->dc_table_index = get_bits1(&s->gb);
        }
        s->inter_intra_pred = 0;
        s->no_rounding      = 1;

        if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "qscale:%d rlc:%d rl:%d dc:%d mbrl:%d j_type:%d \n",
                   s->qscale, s->rl_chroma_table_index, s->rl_table_index,
                   s->dc_table_index, s->per_mb_rl_table, w->j_type);
        }
    } else {
        int cbp_index;

        w->j_type = 0;

        parse_mb_skip(w);
        cbp_index = decode012(&s->gb);
        if (s->qscale <= 10) {
            int map[3] = { 0, 2, 1 };
            w->cbp_table_index = map[cbp_index];
        } else if (s->qscale <= 20) {
            int map[3] = { 1, 0, 2 };
            w->cbp_table_index = map[cbp_index];
        } else {
            int map[3] = { 2, 1, 0 };
            w->cbp_table_index = map[cbp_index];
        }

        if (w->mspel_bit)
            s->mspel = get_bits1(&s->gb);
        else
            s->mspel = 0; // FIXME check

        if (w->abt_flag) {
            w->per_mb_abt = get_bits1(&s->gb) ^ 1;
            if (!w->per_mb_abt)
                w->abt_type = decode012(&s->gb);
        }

        if (w->per_mb_rl_bit)
            s->per_mb_rl_table = get_bits1(&s->gb);
        else
            s->per_mb_rl_table = 0;

        if (!s->per_mb_rl_table) {
            s->rl_table_index        = decode012(&s->gb);
            s->rl_chroma_table_index = s->rl_table_index;
        }

        s->dc_table_index = get_bits1(&s->gb);
        s->mv_table_index = get_bits1(&s->gb);

        s->inter_intra_pred = 0; // (s->width * s->height < 320 * 240 && s->bit_rate <= II_BITRATE);
        s->no_rounding     ^= 1;

        if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "rl:%d rlc:%d dc:%d mv:%d mbrl:%d qp:%d mspel:%d "
                   "per_mb_abt:%d abt_type:%d cbp:%d ii:%d\n",
                   s->rl_table_index, s->rl_chroma_table_index,
                   s->dc_table_index, s->mv_table_index,
                   s->per_mb_rl_table, s->qscale, s->mspel,
                   w->per_mb_abt, w->abt_type, w->cbp_table_index,
                   s->inter_intra_pred);
        }
    }

    s->esc3_level_length = 0;
    s->esc3_run_length   = 0;

    s->picture_number++; // FIXME ?

    if (w->j_type) {
        ff_intrax8_decode_picture(&w->x8, &s->current_picture,
                                  &s->gb, &s->mb_x, &s->mb_y,
                                  2 * s->qscale, (s->qscale - 1) | 1,
                                  s->loop_filter, s->low_delay);

        ff_er_add_slice(&w->s.er, 0, 0,
                        (w->s.mb_x >> 1) - 1, (w->s.mb_y >> 1) - 1,
                        ER_MB_END);
        return 1;
    }

    return 0;
}
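
/* Decode one motion vector; when mspel is enabled and the vector has a
 * half-pel component, an extra hshift bit is read. */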
static inline int wmv2_decode_motion(Wmv2Context *w, int *mx_ptr, int *my_ptr)
{
    MpegEncContext *const s = &w->s;
    int ret;

    ret = ff_msmpeg4_decode_motion(s, mx_ptr, my_ptr);
    if (ret < 0)
        return ret;

    if ((((*mx_ptr) | (*my_ptr)) & 1) && s->mspel)
        w->hshift = get_bits1(&s->gb);
    else
        w->hshift = 0;

    return 0;
}
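
/* Predict the motion vector from the left (A), top (B) and top-right (C)
 * neighbours. With the top-left MV flag set and A and B differing by at
 * least 8, one bit selects between A and B; otherwise the median of A, B
 * and C is used (just A on the first slice line). */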
static int16_t *wmv2_pred_motion(Wmv2Context *w, int *px, int *py)
{
    MpegEncContext *const s = &w->s;
    int xy, wrap, diff, type;
    int16_t *A, *B, *C, *mot_val;

    wrap = s->b8_stride;
    xy   = s->block_index[0];

    mot_val = s->current_picture.motion_val[0][xy];

    A = s->current_picture.motion_val[0][xy - 1];
    B = s->current_picture.motion_val[0][xy - wrap];
    C = s->current_picture.motion_val[0][xy + 2 - wrap];

    if (s->mb_x && !s->first_slice_line && !s->mspel && w->top_left_mv_flag)
        diff = FFMAX(FFABS(A[0] - B[0]), FFABS(A[1] - B[1]));
    else
        diff = 0;

    if (diff >= 8)
        type = get_bits1(&s->gb);
    else
        type = 2;

    if (type == 0) {
        *px = A[0];
        *py = A[1];
    } else if (type == 1) {
        *px = B[0];
        *py = B[1];
    } else {
        /* special case for first (slice) line */
        if (s->first_slice_line) {
            *px = A[0];
            *py = A[1];
        } else {
            *px = mid_pred(A[0], B[0], C[0]);
            *py = mid_pred(A[1], B[1], C[1]);
        }
    }

    return mot_val;
}
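
/* Decode a single inter-coded block. With ABT active the block is coded
 * as up to two half-blocks selected by sub_cbp, using the ABT scantable;
 * otherwise a regular 8x8 block is decoded with the inter scantable. */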
static inline int wmv2_decode_inter_block(Wmv2Context *w, int16_t *block,
                                          int n, int cbp)
{
    MpegEncContext *const s = &w->s;
    static const int sub_cbp_table[3] = { 2, 3, 1 };
    int sub_cbp, ret;

    if (!cbp) {
        s->block_last_index[n] = -1;
        return 0;
    }

    if (w->per_block_abt)
        w->abt_type = decode012(&s->gb);
    w->abt_type_table[n] = w->abt_type;

    if (w->abt_type) {
        // const uint8_t *scantable = w->abt_scantable[w->abt_type - 1].permutated;
        const uint8_t *scantable = w->abt_scantable[w->abt_type - 1].scantable;
        // const uint8_t *scantable = w->abt_type - 1 ? w->abt_scantable[1].permutated : w->abt_scantable[0].scantable;

        sub_cbp = sub_cbp_table[decode012(&s->gb)];

        if (sub_cbp & 1)
            if ((ret = ff_msmpeg4_decode_block(s, block, n, 1, scantable)) < 0)
                return ret;

        if (sub_cbp & 2)
            if ((ret = ff_msmpeg4_decode_block(s, w->abt_block2[n], n, 1, scantable)) < 0)
                return ret;

        s->block_last_index[n] = 63;

        return 0;
    } else {
        return ff_msmpeg4_decode_block(s, block, n, 1,
                                       s->inter_scantable.permutated);
    }
}
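
/* Decode one macroblock: handle skipped MBs, read the (predicted) coded
 * block pattern, the motion vector for inter MBs, and finally the six
 * blocks (4 luma, 2 chroma). */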
int ff_wmv2_decode_mb(MpegEncContext *s, int16_t block[6][64])
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int cbp, code, i, ret;
    uint8_t *coded_val;

    if (w->j_type)
        return 0;

    if (s->pict_type == AV_PICTURE_TYPE_P) {
        if (IS_SKIP(s->current_picture.mb_type[s->mb_y * s->mb_stride + s->mb_x])) {
            /* skip mb */
            s->mb_intra = 0;
            for (i = 0; i < 6; i++)
                s->block_last_index[i] = -1;
            s->mv_dir      = MV_DIR_FORWARD;
            s->mv_type     = MV_TYPE_16X16;
            s->mv[0][0][0] = 0;
            s->mv[0][0][1] = 0;
            s->mb_skipped  = 1;
            w->hshift      = 0;
            return 0;
        }

        code = get_vlc2(&s->gb, ff_mb_non_intra_vlc[w->cbp_table_index].table,
                        MB_NON_INTRA_VLC_BITS, 3);
        if (code < 0)
            return AVERROR_INVALIDDATA;
        s->mb_intra = (~code & 0x40) >> 6;

        cbp = code & 0x3f;
    } else {
        s->mb_intra = 1;
        code = get_vlc2(&s->gb, ff_msmp4_mb_i_vlc.table, MB_INTRA_VLC_BITS, 2);
        if (code < 0) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "II-cbp illegal at %d %d\n", s->mb_x, s->mb_y);
            return AVERROR_INVALIDDATA;
        }
        /* predict coded block pattern */
        cbp = 0;
        for (i = 0; i < 6; i++) {
            int val = ((code >> (5 - i)) & 1);
            if (i < 4) {
                int pred   = ff_msmpeg4_coded_block_pred(s, i, &coded_val);
                val        = val ^ pred;
                *coded_val = val;
            }
            cbp |= val << (5 - i);
        }
    }

    if (!s->mb_intra) {
        int mx, my;
        wmv2_pred_motion(w, &mx, &my);

        if (cbp) {
            s->bdsp.clear_blocks(s->block[0]);
            if (s->per_mb_rl_table) {
                s->rl_table_index        = decode012(&s->gb);
                s->rl_chroma_table_index = s->rl_table_index;
            }

            if (w->abt_flag && w->per_mb_abt) {
                w->per_block_abt = get_bits1(&s->gb);
                if (!w->per_block_abt)
                    w->abt_type = decode012(&s->gb);
            } else
                w->per_block_abt = 0;
        }

        if ((ret = wmv2_decode_motion(w, &mx, &my)) < 0)
            return ret;

        s->mv_dir      = MV_DIR_FORWARD;
        s->mv_type     = MV_TYPE_16X16;
        s->mv[0][0][0] = mx;
        s->mv[0][0][1] = my;

        for (i = 0; i < 6; i++) {
            if ((ret = wmv2_decode_inter_block(w, block[i], i, (cbp >> (5 - i)) & 1)) < 0) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "\nerror while decoding inter block: %d x %d (%d)\n",
                       s->mb_x, s->mb_y, i);
                return ret;
            }
        }
    } else {
        if (s->pict_type == AV_PICTURE_TYPE_P)
            ff_dlog(s->avctx, "%d%d ", s->inter_intra_pred, cbp);
        ff_dlog(s->avctx, "I at %d %d %d %06X\n", s->mb_x, s->mb_y,
                ((cbp & 3) ? 1 : 0) + ((cbp & 0x3C) ? 2 : 0),
                show_bits(&s->gb, 24));

        s->ac_pred = get_bits1(&s->gb);
        if (s->inter_intra_pred) {
            s->h263_aic_dir = get_vlc2(&s->gb, ff_inter_intra_vlc.table,
                                       INTER_INTRA_VLC_BITS, 1);
            ff_dlog(s->avctx, "%d%d %d %d/",
                    s->ac_pred, s->h263_aic_dir, s->mb_x, s->mb_y);
        }
        if (s->per_mb_rl_table && cbp) {
            s->rl_table_index        = decode012(&s->gb);
            s->rl_chroma_table_index = s->rl_table_index;
        }

        s->bdsp.clear_blocks(s->block[0]);
        for (i = 0; i < 6; i++) {
            if ((ret = ff_msmpeg4_decode_block(s, block[i], i, (cbp >> (5 - i)) & 1, NULL)) < 0) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "\nerror while decoding intra block: %d x %d (%d)\n",
                       s->mb_x, s->mb_y, i);
                return ret;
            }
        }
    }

    return 0;
}
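
/* Decoder setup and teardown: initialize the shared MSMPEG4 state, the
 * WMV2 common tables and the IntraX8 context used for J-type frames. */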
static av_cold int wmv2_decode_init(AVCodecContext *avctx)
{
    Wmv2Context *const w = avctx->priv_data;
    int ret;

    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
        return ret;

    ff_wmv2_common_init(w);

    return ff_intrax8_common_init(avctx, &w->x8, &w->s.idsp,
                                  w->s.block, w->s.block_last_index,
                                  w->s.mb_width, w->s.mb_height);
}

static av_cold int wmv2_decode_end(AVCodecContext *avctx)
{
    Wmv2Context *w = avctx->priv_data;

    ff_intrax8_common_end(&w->x8);
    return ff_h263_decode_end(avctx);
}
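
/* Codec registration; frame-level decoding is driven by the generic
 * ff_h263_decode_frame() entry point. */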
AVCodec ff_wmv2_decoder = {
    .name           = "wmv2",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 8"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV2,
    .priv_data_size = sizeof(Wmv2Context),
    .init           = wmv2_decode_init,
    .close          = wmv2_decode_end,
    .decode         = ff_h263_decode_frame,
    .capabilities   = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1,
    .pix_fmts       = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                     AV_PIX_FMT_NONE },
};