/*
 * Copyright (c) 2002 The Libav Project
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avcodec.h"
#include "h263.h"
#include "intrax8.h"
#include "mathops.h"
#include "mpegutils.h"
#include "mpegvideo.h"
#include "msmpeg4.h"
#include "msmpeg4data.h"
#include "wmv2.h"
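
/* Read the 2-bit skip-coding mode from the picture header and fill the
 * per-macroblock type array: SKIP_TYPE_NONE marks every MB as coded,
 * SKIP_TYPE_MPEG reads one skip bit per MB, and SKIP_TYPE_ROW / SKIP_TYPE_COL
 * read an "entire row/column skipped" bit before falling back to per-MB bits. */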
static void parse_mb_skip(Wmv2Context *w)
{
    int mb_x, mb_y;
    MpegEncContext *const s = &w->s;
    uint32_t *const mb_type = s->current_picture_ptr->mb_type;

    w->skip_type = get_bits(&s->gb, 2);
    switch (w->skip_type) {
    case SKIP_TYPE_NONE:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++)
            for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                mb_type[mb_y * s->mb_stride + mb_x] =
                    MB_TYPE_16x16 | MB_TYPE_L0;
        break;
    case SKIP_TYPE_MPEG:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++)
            for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                mb_type[mb_y * s->mb_stride + mb_x] =
                    (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
        break;
    case SKIP_TYPE_ROW:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
            if (get_bits1(&s->gb)) {
                for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
            } else {
                for (mb_x = 0; mb_x < s->mb_width; mb_x++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
            }
        }
        break;
    case SKIP_TYPE_COL:
        for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
            if (get_bits1(&s->gb)) {
                for (mb_y = 0; mb_y < s->mb_height; mb_y++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
            } else {
                for (mb_y = 0; mb_y < s->mb_height; mb_y++)
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
            }
        }
        break;
    }
}
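
/* Parse the first 32 bits of extradata: frame rate, bit rate and the
 * capability flags (mspel, loop filter, ABT, j_type, top-left MV,
 * per-MB RL table), plus a 3-bit code that determines the slice height. */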
static int decode_ext_header(Wmv2Context *w)
{
    MpegEncContext *const s = &w->s;
    GetBitContext gb;
    int fps;
    int code;

    if (s->avctx->extradata_size < 4)
        return -1;

    init_get_bits(&gb, s->avctx->extradata, 32);

    fps                 = get_bits(&gb, 5);
    s->bit_rate         = get_bits(&gb, 11) * 1024;
    w->mspel_bit        = get_bits1(&gb);
    s->loop_filter      = get_bits1(&gb);
    w->abt_flag         = get_bits1(&gb);
    w->j_type_bit       = get_bits1(&gb);
    w->top_left_mv_flag = get_bits1(&gb);
    w->per_mb_rl_bit    = get_bits1(&gb);
    code                = get_bits(&gb, 3);

    if (code == 0)
        return -1;

    s->slice_height = s->mb_height / code;

    if (s->avctx->debug & FF_DEBUG_PICT_INFO)
        av_log(s->avctx, AV_LOG_DEBUG,
               "fps:%d, br:%d, qpbit:%d, abt_flag:%d, j_type_bit:%d, "
               "tl_mv_flag:%d, mbrl_bit:%d, code:%d, loop_filter:%d, "
               "slices:%d\n",
               fps, s->bit_rate, w->mspel_bit, w->abt_flag, w->j_type_bit,
               w->top_left_mv_flag, w->per_mb_rl_bit, code, s->loop_filter,
               code);

    return 0;
}
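
/* First part of the picture header: picture type and quantizer. */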
int ff_wmv2_decode_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int code;

    if (s->picture_number == 0)
        decode_ext_header(w);

    s->pict_type = get_bits1(&s->gb) + 1;
    if (s->pict_type == AV_PICTURE_TYPE_I) {
        code = get_bits(&s->gb, 7);
        av_log(s->avctx, AV_LOG_DEBUG, "I7:%X/\n", code);
    }

    s->chroma_qscale = s->qscale = get_bits(&s->gb, 5);
    if (s->qscale <= 0)
        return -1;

    return 0;
}
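
/* Second part of the picture header: skip coding, table selectors and
 * coding flags. When j_type is set the whole picture is handed to the
 * IntraX8 decoder and 1 is returned. */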
int ff_wmv2_decode_secondary_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;

    if (s->pict_type == AV_PICTURE_TYPE_I) {
        if (w->j_type_bit)
            w->j_type = get_bits1(&s->gb);
        else
            w->j_type = 0; // FIXME check

        if (!w->j_type) {
            if (w->per_mb_rl_bit)
                s->per_mb_rl_table = get_bits1(&s->gb);
            else
                s->per_mb_rl_table = 0;

            if (!s->per_mb_rl_table) {
                s->rl_chroma_table_index = decode012(&s->gb);
                s->rl_table_index        = decode012(&s->gb);
            }

            s->dc_table_index = get_bits1(&s->gb);
        }
        s->inter_intra_pred = 0;
        s->no_rounding      = 1;

        if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "qscale:%d rlc:%d rl:%d dc:%d mbrl:%d j_type:%d \n",
                   s->qscale, s->rl_chroma_table_index, s->rl_table_index,
                   s->dc_table_index, s->per_mb_rl_table, w->j_type);
        }
    } else {
        int cbp_index;

        w->j_type = 0;

        parse_mb_skip(w);
        cbp_index = decode012(&s->gb);
        if (s->qscale <= 10) {
            int map[3] = { 0, 2, 1 };
            w->cbp_table_index = map[cbp_index];
        } else if (s->qscale <= 20) {
            int map[3] = { 1, 0, 2 };
            w->cbp_table_index = map[cbp_index];
        } else {
            int map[3] = { 2, 1, 0 };
            w->cbp_table_index = map[cbp_index];
        }

        if (w->mspel_bit)
            s->mspel = get_bits1(&s->gb);
        else
            s->mspel = 0; // FIXME check

        if (w->abt_flag) {
            w->per_mb_abt = get_bits1(&s->gb) ^ 1;
            if (!w->per_mb_abt)
                w->abt_type = decode012(&s->gb);
        }

        if (w->per_mb_rl_bit)
            s->per_mb_rl_table = get_bits1(&s->gb);
        else
            s->per_mb_rl_table = 0;

        if (!s->per_mb_rl_table) {
            s->rl_table_index        = decode012(&s->gb);
            s->rl_chroma_table_index = s->rl_table_index;
        }

        s->dc_table_index = get_bits1(&s->gb);
        s->mv_table_index = get_bits1(&s->gb);

        s->inter_intra_pred = 0; // (s->width * s->height < 320 * 240 && s->bit_rate <= II_BITRATE);
        s->no_rounding     ^= 1;

        if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "rl:%d rlc:%d dc:%d mv:%d mbrl:%d qp:%d mspel:%d "
                   "per_mb_abt:%d abt_type:%d cbp:%d ii:%d\n",
                   s->rl_table_index, s->rl_chroma_table_index,
                   s->dc_table_index, s->mv_table_index,
                   s->per_mb_rl_table, s->qscale, s->mspel,
                   w->per_mb_abt, w->abt_type, w->cbp_table_index,
                   s->inter_intra_pred);
        }
    }
    s->esc3_level_length = 0;
    s->esc3_run_length   = 0;
    s->picture_number++; // FIXME ?

    if (w->j_type) {
        ff_intrax8_decode_picture(&w->x8, 2 * s->qscale, (s->qscale - 1) | 1);
        return 1;
    }

    return 0;
}
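
/* Decode one motion vector; for half-pel vectors with mspel enabled an
 * extra hshift bit follows. */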
static inline int wmv2_decode_motion(Wmv2Context *w, int *mx_ptr, int *my_ptr)
{
    MpegEncContext *const s = &w->s;
    int ret;

    ret = ff_msmpeg4_decode_motion(s, mx_ptr, my_ptr);
    if (ret < 0)
        return -1;

    if ((((*mx_ptr) | (*my_ptr)) & 1) && s->mspel)
        w->hshift = get_bits1(&s->gb);
    else
        w->hshift = 0;

    return 0;
}
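
/* Predict the motion vector of the current macroblock from its left (A),
 * top (B) and top-right (C) neighbours. When the A/B difference is large
 * enough, one bit selects A or B directly; otherwise the median of the
 * three candidates is used (A alone on the first slice line). */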
static int16_t *wmv2_pred_motion(Wmv2Context *w, int *px, int *py)
{
    MpegEncContext *const s = &w->s;
    int xy, wrap, diff, type;
    int16_t *A, *B, *C, *mot_val;

    wrap = s->b8_stride;
    xy   = s->block_index[0];

    mot_val = s->current_picture.motion_val[0][xy];

    A = s->current_picture.motion_val[0][xy     - 1];
    B = s->current_picture.motion_val[0][xy     - wrap];
    C = s->current_picture.motion_val[0][xy + 2 - wrap];

    if (s->mb_x && !s->first_slice_line && !s->mspel && w->top_left_mv_flag)
        diff = FFMAX(FFABS(A[0] - B[0]), FFABS(A[1] - B[1]));
    else
        diff = 0;

    if (diff >= 8)
        type = get_bits1(&s->gb);
    else
        type = 2;

    if (type == 0) {
        *px = A[0];
        *py = A[1];
    } else if (type == 1) {
        *px = B[0];
        *py = B[1];
    } else {
        /* special case for first (slice) line */
        if (s->first_slice_line) {
            *px = A[0];
            *py = A[1];
        } else {
            *px = mid_pred(A[0], B[0], C[0]);
            *py = mid_pred(A[1], B[1], C[1]);
        }
    }

    return mot_val;
}
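
/* Decode one inter-coded block: when an ABT transform type is selected the
 * block uses its own scantable and a sub-CBP covering two sub-blocks,
 * otherwise the regular MSMPEG4 block decoder is used. */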
static inline int wmv2_decode_inter_block(Wmv2Context *w, int16_t *block,
                                          int n, int cbp)
{
    MpegEncContext *const s = &w->s;
    static const int sub_cbp_table[3] = { 2, 3, 1 };
    int sub_cbp;

    if (!cbp) {
        s->block_last_index[n] = -1;
        return 0;
    }

    if (w->per_block_abt)
        w->abt_type = decode012(&s->gb);
    w->abt_type_table[n] = w->abt_type;

    if (w->abt_type) {
        // const uint8_t *scantable = w->abt_scantable[w->abt_type - 1].permutated;
        const uint8_t *scantable = w->abt_scantable[w->abt_type - 1].scantable;
        // const uint8_t *scantable = w->abt_type - 1 ? w->abt_scantable[1].permutated : w->abt_scantable[0].scantable;

        sub_cbp = sub_cbp_table[decode012(&s->gb)];

        if (sub_cbp & 1)
            if (ff_msmpeg4_decode_block(s, block, n, 1, scantable) < 0)
                return -1;

        if (sub_cbp & 2)
            if (ff_msmpeg4_decode_block(s, w->abt_block2[n], n, 1, scantable) < 0)
                return -1;

        s->block_last_index[n] = 63;

        return 0;
    } else {
        return ff_msmpeg4_decode_block(s, block, n, 1,
                                       s->inter_scantable.permutated);
    }
}
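
/* Decode a single macroblock: handle skipped MBs, read the coded block
 * pattern and motion information, then decode the six blocks of an inter
 * or intra macroblock. */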
int ff_wmv2_decode_mb(MpegEncContext *s, int16_t block[6][64])
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int cbp, code, i;
    uint8_t *coded_val;

    if (w->j_type)
        return 0;

    if (s->pict_type == AV_PICTURE_TYPE_P) {
        if (IS_SKIP(s->current_picture.mb_type[s->mb_y * s->mb_stride + s->mb_x])) {
            /* skip mb */
            s->mb_intra = 0;
            for (i = 0; i < 6; i++)
                s->block_last_index[i] = -1;
            s->mv_dir      = MV_DIR_FORWARD;
            s->mv_type     = MV_TYPE_16X16;
            s->mv[0][0][0] = 0;
            s->mv[0][0][1] = 0;
            s->mb_skipped  = 1;
            w->hshift      = 0;
            return 0;
        }

        code = get_vlc2(&s->gb, ff_mb_non_intra_vlc[w->cbp_table_index].table,
                        MB_NON_INTRA_VLC_BITS, 3);
        if (code < 0)
            return -1;
        s->mb_intra = (~code & 0x40) >> 6;

        cbp = code & 0x3f;
    } else {
        s->mb_intra = 1;
        code = get_vlc2(&s->gb, ff_msmp4_mb_i_vlc.table, MB_INTRA_VLC_BITS, 2);
        if (code < 0) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "II-cbp illegal at %d %d\n", s->mb_x, s->mb_y);
            return -1;
        }
        /* predict coded block pattern */
        cbp = 0;
        for (i = 0; i < 6; i++) {
            int val = ((code >> (5 - i)) & 1);
            if (i < 4) {
                int pred = ff_msmpeg4_coded_block_pred(s, i, &coded_val);
                val        = val ^ pred;
                *coded_val = val;
            }
            cbp |= val << (5 - i);
        }
    }

    if (!s->mb_intra) {
        int mx, my;

        wmv2_pred_motion(w, &mx, &my);

        if (cbp) {
            s->bdsp.clear_blocks(s->block[0]);
            if (s->per_mb_rl_table) {
                s->rl_table_index        = decode012(&s->gb);
                s->rl_chroma_table_index = s->rl_table_index;
            }

            if (w->abt_flag && w->per_mb_abt) {
                w->per_block_abt = get_bits1(&s->gb);
                if (!w->per_block_abt)
                    w->abt_type = decode012(&s->gb);
            } else
                w->per_block_abt = 0;
        }

        if (wmv2_decode_motion(w, &mx, &my) < 0)
            return -1;

        s->mv_dir      = MV_DIR_FORWARD;
        s->mv_type     = MV_TYPE_16X16;
        s->mv[0][0][0] = mx;
        s->mv[0][0][1] = my;

        for (i = 0; i < 6; i++) {
            if (wmv2_decode_inter_block(w, block[i], i, (cbp >> (5 - i)) & 1) < 0) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "\nerror while decoding inter block: %d x %d (%d)\n",
                       s->mb_x, s->mb_y, i);
                return -1;
            }
        }
    } else {
        if (s->pict_type == AV_PICTURE_TYPE_P)
            av_dlog(s->avctx, "%d%d ", s->inter_intra_pred, cbp);
        av_dlog(s->avctx, "I at %d %d %d %06X\n", s->mb_x, s->mb_y,
                ((cbp & 3) ? 1 : 0) + ((cbp & 0x3C) ? 2 : 0),
                show_bits(&s->gb, 24));
        s->ac_pred = get_bits1(&s->gb);
        if (s->inter_intra_pred) {
            s->h263_aic_dir = get_vlc2(&s->gb, ff_inter_intra_vlc.table,
                                       INTER_INTRA_VLC_BITS, 1);
            av_dlog(s->avctx, "%d%d %d %d/",
                    s->ac_pred, s->h263_aic_dir, s->mb_x, s->mb_y);
        }
        if (s->per_mb_rl_table && cbp) {
            s->rl_table_index        = decode012(&s->gb);
            s->rl_chroma_table_index = s->rl_table_index;
        }

        s->bdsp.clear_blocks(s->block[0]);
        for (i = 0; i < 6; i++) {
            if (ff_msmpeg4_decode_block(s, block[i], i, (cbp >> (5 - i)) & 1, NULL) < 0) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "\nerror while decoding intra block: %d x %d (%d)\n",
                       s->mb_x, s->mb_y, i);
                return -1;
            }
        }
    }

    return 0;
}

static av_cold int wmv2_decode_init(AVCodecContext *avctx)
{
    Wmv2Context *const w = avctx->priv_data;

    if (ff_msmpeg4_decode_init(avctx) < 0)
        return -1;

    ff_wmv2_common_init(w);

    ff_intrax8_common_init(&w->x8, &w->s);

    return 0;
}

static av_cold int wmv2_decode_end(AVCodecContext *avctx)
{
    Wmv2Context *w = avctx->priv_data;

    ff_intrax8_common_end(&w->x8);
    return ff_h263_decode_end(avctx);
}

AVCodec ff_wmv2_decoder = {
    .name           = "wmv2",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 8"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV2,
    .priv_data_size = sizeof(Wmv2Context),
    .init           = wmv2_decode_init,
    .close          = wmv2_decode_end,
    .decode         = ff_h263_decode_frame,
    .capabilities   = CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
    .pix_fmts       = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV420P,
                                                     AV_PIX_FMT_NONE },
};