/*
 * Copyright (c) 2002 The Libav Project
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avcodec.h"
#include "mpegvideo.h"
#include "h263.h"
#include "mathops.h"
#include "msmpeg4.h"
#include "msmpeg4data.h"
#include "intrax8.h"
#include "wmv2.h"
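
/* Read the 2-bit skip coding mode and fill in the skip flag of every
 * macroblock of the picture. Depending on the mode, skip flags are either
 * absent, signalled per macroblock, or signalled per row/column with a
 * shortcut bit for fully skipped rows/columns. */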
static void parse_mb_skip(Wmv2Context *w)
{
    int mb_x, mb_y;
    MpegEncContext *const s = &w->s;
    uint32_t *const mb_type = s->current_picture_ptr->mb_type;

    w->skip_type = get_bits(&s->gb, 2);
    switch (w->skip_type) {
    case SKIP_TYPE_NONE:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
            for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
                mb_type[mb_y * s->mb_stride + mb_x] =
                    MB_TYPE_16x16 | MB_TYPE_L0;
            }
        }
        break;
    case SKIP_TYPE_MPEG:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
            for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
                mb_type[mb_y * s->mb_stride + mb_x] =
                    (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
            }
        }
        break;
    case SKIP_TYPE_ROW:
        for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
            if (get_bits1(&s->gb)) {
                for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
                }
            } else {
                for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
                }
            }
        }
        break;
    case SKIP_TYPE_COL:
        for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
            if (get_bits1(&s->gb)) {
                for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
                }
            } else {
                for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
                    mb_type[mb_y * s->mb_stride + mb_x] =
                        (get_bits1(&s->gb) ? MB_TYPE_SKIP : 0) | MB_TYPE_16x16 | MB_TYPE_L0;
                }
            }
        }
        break;
    }
}
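
/* Parse the 4-byte extradata header: frame rate, bit rate and the feature
 * flags (mspel, loop filter, ABT, J-type, top-left MV, per-macroblock RL
 * table) plus the slice count code. */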
static int decode_ext_header(Wmv2Context *w)
{
    MpegEncContext *const s = &w->s;
    GetBitContext gb;
    int fps;
    int code;

    if (s->avctx->extradata_size < 4)
        return -1;

    init_get_bits(&gb, s->avctx->extradata, 32);

    fps                 = get_bits(&gb, 5);
    s->bit_rate         = get_bits(&gb, 11) * 1024;
    w->mspel_bit        = get_bits1(&gb);
    s->loop_filter      = get_bits1(&gb);
    w->abt_flag         = get_bits1(&gb);
    w->j_type_bit       = get_bits1(&gb);
    w->top_left_mv_flag = get_bits1(&gb);
    w->per_mb_rl_bit    = get_bits1(&gb);
    code                = get_bits(&gb, 3);

    if (code == 0)
        return -1;

    s->slice_height = s->mb_height / code;

    if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
        av_log(s->avctx, AV_LOG_DEBUG,
               "fps:%d, br:%d, qpbit:%d, abt_flag:%d, j_type_bit:%d, "
               "tl_mv_flag:%d, mbrl_bit:%d, code:%d, loop_filter:%d, slices:%d\n",
               fps, s->bit_rate, w->mspel_bit, w->abt_flag, w->j_type_bit,
               w->top_left_mv_flag, w->per_mb_rl_bit, code, s->loop_filter,
               code);
    }

    return 0;
}
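
/* Decode the first part of the picture header: picture type and quantizer
 * scale. The extradata header is parsed once, before the first picture. */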
int ff_wmv2_decode_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int code;

    if (s->picture_number == 0)
        decode_ext_header(w);

    s->pict_type = get_bits1(&s->gb) + 1;
    if (s->pict_type == AV_PICTURE_TYPE_I) {
        code = get_bits(&s->gb, 7);
        av_log(s->avctx, AV_LOG_DEBUG, "I7:%X/\n", code);
    }
    s->chroma_qscale = s->qscale = get_bits(&s->gb, 5);
    if (s->qscale <= 0)
        return -1;

    return 0;
}
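
/* Decode the remainder of the picture header: the J-type flag, the RL/DC/
 * MV/CBP table selection, the per-macroblock skip map for P-frames and the
 * mspel/ABT modes. Returns 1 for J-type pictures, which are decoded entirely
 * by the IntraX8 decoder, and 0 otherwise. */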
int ff_wmv2_decode_secondary_picture_header(MpegEncContext *s)
{
    Wmv2Context *const w = (Wmv2Context *) s;

    if (s->pict_type == AV_PICTURE_TYPE_I) {
        if (w->j_type_bit)
            w->j_type = get_bits1(&s->gb);
        else
            w->j_type = 0; // FIXME check

        if (!w->j_type) {
            if (w->per_mb_rl_bit)
                s->per_mb_rl_table = get_bits1(&s->gb);
            else
                s->per_mb_rl_table = 0;

            if (!s->per_mb_rl_table) {
                s->rl_chroma_table_index = decode012(&s->gb);
                s->rl_table_index        = decode012(&s->gb);
            }

            s->dc_table_index = get_bits1(&s->gb);
        }
        s->inter_intra_pred = 0;
        s->no_rounding      = 1;
        if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "qscale:%d rlc:%d rl:%d dc:%d mbrl:%d j_type:%d \n",
                   s->qscale, s->rl_chroma_table_index, s->rl_table_index,
                   s->dc_table_index, s->per_mb_rl_table, w->j_type);
        }
    } else {
        int cbp_index;

        w->j_type = 0;
        parse_mb_skip(w);
        cbp_index = decode012(&s->gb);
        if (s->qscale <= 10) {
            int map[3] = { 0, 2, 1 };
            w->cbp_table_index = map[cbp_index];
        } else if (s->qscale <= 20) {
            int map[3] = { 1, 0, 2 };
            w->cbp_table_index = map[cbp_index];
        } else {
            int map[3] = { 2, 1, 0 };
            w->cbp_table_index = map[cbp_index];
        }

        if (w->mspel_bit)
            s->mspel = get_bits1(&s->gb);
        else
            s->mspel = 0; // FIXME check

        if (w->abt_flag) {
            w->per_mb_abt = get_bits1(&s->gb) ^ 1;
            if (!w->per_mb_abt)
                w->abt_type = decode012(&s->gb);
        }

        if (w->per_mb_rl_bit)
            s->per_mb_rl_table = get_bits1(&s->gb);
        else
            s->per_mb_rl_table = 0;

        if (!s->per_mb_rl_table) {
            s->rl_table_index        = decode012(&s->gb);
            s->rl_chroma_table_index = s->rl_table_index;
        }

        s->dc_table_index = get_bits1(&s->gb);
        s->mv_table_index = get_bits1(&s->gb);

        s->inter_intra_pred = 0; // (s->width * s->height < 320 * 240 && s->bit_rate <= II_BITRATE);
        s->no_rounding     ^= 1;

        if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
            av_log(s->avctx, AV_LOG_DEBUG,
                   "rl:%d rlc:%d dc:%d mv:%d mbrl:%d qp:%d mspel:%d "
                   "per_mb_abt:%d abt_type:%d cbp:%d ii:%d\n",
                   s->rl_table_index, s->rl_chroma_table_index,
                   s->dc_table_index, s->mv_table_index,
                   s->per_mb_rl_table, s->qscale, s->mspel,
                   w->per_mb_abt, w->abt_type, w->cbp_table_index,
                   s->inter_intra_pred);
        }
    }
    s->esc3_level_length = 0;
    s->esc3_run_length   = 0;

    s->picture_number++; // FIXME ?

    if (w->j_type) {
        ff_intrax8_decode_picture(&w->x8, 2 * s->qscale, (s->qscale - 1) | 1);
        return 1;
    }

    return 0;
}
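
/* Decode one motion vector with the MSMPEG4 routine; when mspel is enabled
 * and either vector component is odd, read the extra hshift bit. */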
static inline int wmv2_decode_motion(Wmv2Context *w, int *mx_ptr, int *my_ptr)
{
    MpegEncContext *const s = &w->s;
    int ret;

    ret = ff_msmpeg4_decode_motion(s, mx_ptr, my_ptr);
    if (ret < 0)
        return -1;

    if ((((*mx_ptr) | (*my_ptr)) & 1) && s->mspel)
        w->hshift = get_bits1(&s->gb);
    else
        w->hshift = 0;

    return 0;
}
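
/* Predict the motion vector of the current macroblock from its left (A),
 * top (B) and top-right (C) neighbours. When top_left_mv_flag is set and A
 * and B differ by 8 or more, an explicit bit selects one of them; otherwise
 * the median of A, B and C is used (A alone on the first slice line). */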
static int16_t *wmv2_pred_motion(Wmv2Context *w, int *px, int *py)
{
    MpegEncContext *const s = &w->s;
    int xy, wrap, diff, type;
    int16_t *A, *B, *C, *mot_val;

    wrap    = s->b8_stride;
    xy      = s->block_index[0];

    mot_val = s->current_picture.motion_val[0][xy];

    A = s->current_picture.motion_val[0][xy     - 1];
    B = s->current_picture.motion_val[0][xy     - wrap];
    C = s->current_picture.motion_val[0][xy + 2 - wrap];

    if (s->mb_x && !s->first_slice_line && !s->mspel && w->top_left_mv_flag)
        diff = FFMAX(FFABS(A[0] - B[0]), FFABS(A[1] - B[1]));
    else
        diff = 0;

    if (diff >= 8)
        type = get_bits1(&s->gb);
    else
        type = 2;

    if (type == 0) {
        *px = A[0];
        *py = A[1];
    } else if (type == 1) {
        *px = B[0];
        *py = B[1];
    } else {
        /* special case for first (slice) line */
        if (s->first_slice_line) {
            *px = A[0];
            *py = A[1];
        } else {
            *px = mid_pred(A[0], B[0], C[0]);
            *py = mid_pred(A[1], B[1], C[1]);
        }
    }

    return mot_val;
}
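
/* Decode the coefficients of one inter-coded block. With ABT enabled the
 * block may use an alternative transform type; in that case a sub-CBP
 * selects which of the two halves are coded, and the second half goes
 * into abt_block2. */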
static inline int wmv2_decode_inter_block(Wmv2Context *w, int16_t *block,
                                          int n, int cbp)
{
    MpegEncContext *const s = &w->s;
    static const int sub_cbp_table[3] = { 2, 3, 1 };
    int sub_cbp;

    if (!cbp) {
        s->block_last_index[n] = -1;
        return 0;
    }

    if (w->per_block_abt)
        w->abt_type = decode012(&s->gb);
    w->abt_type_table[n] = w->abt_type;

    if (w->abt_type) {
//      const uint8_t *scantable = w->abt_scantable[w->abt_type - 1].permutated;
        const uint8_t *scantable = w->abt_scantable[w->abt_type - 1].scantable;
//      const uint8_t *scantable = w->abt_type - 1 ? w->abt_scantable[1].permutated : w->abt_scantable[0].scantable;

        sub_cbp = sub_cbp_table[decode012(&s->gb)];

        if (sub_cbp & 1) {
            if (ff_msmpeg4_decode_block(s, block, n, 1, scantable) < 0)
                return -1;
        }
        if (sub_cbp & 2) {
            if (ff_msmpeg4_decode_block(s, w->abt_block2[n], n, 1, scantable) < 0)
                return -1;
        }
        s->block_last_index[n] = 63;

        return 0;
    } else {
        return ff_msmpeg4_decode_block(s, block, n, 1,
                                       s->inter_scantable.permutated);
    }
}
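
/* Decode one macroblock: handle skipped macroblocks, read the coded block
 * pattern (VLC for inter, predicted pattern for intra), decode the motion
 * vector for inter macroblocks and finally the coefficients of all six
 * blocks. */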
int ff_wmv2_decode_mb(MpegEncContext *s, int16_t block[6][64])
{
    Wmv2Context *const w = (Wmv2Context *) s;
    int cbp, code, i;
    uint8_t *coded_val;

    if (w->j_type)
        return 0;

    if (s->pict_type == AV_PICTURE_TYPE_P) {
        if (IS_SKIP(s->current_picture.mb_type[s->mb_y * s->mb_stride + s->mb_x])) {
            /* skip mb */
            s->mb_intra = 0;
            for (i = 0; i < 6; i++)
                s->block_last_index[i] = -1;
            s->mv_dir      = MV_DIR_FORWARD;
            s->mv_type     = MV_TYPE_16X16;
            s->mv[0][0][0] = 0;
            s->mv[0][0][1] = 0;
            s->mb_skipped  = 1;
            w->hshift      = 0;
            return 0;
        }

        code = get_vlc2(&s->gb, ff_mb_non_intra_vlc[w->cbp_table_index].table,
                        MB_NON_INTRA_VLC_BITS, 3);
        if (code < 0)
            return -1;
        s->mb_intra = (~code & 0x40) >> 6;

        cbp = code & 0x3f;
    } else {
        s->mb_intra = 1;
        code = get_vlc2(&s->gb, ff_msmp4_mb_i_vlc.table, MB_INTRA_VLC_BITS, 2);
        if (code < 0) {
            av_log(s->avctx, AV_LOG_ERROR,
                   "II-cbp illegal at %d %d\n", s->mb_x, s->mb_y);
            return -1;
        }
        /* predict coded block pattern */
        cbp = 0;
        for (i = 0; i < 6; i++) {
            int val = ((code >> (5 - i)) & 1);
            if (i < 4) {
                int pred   = ff_msmpeg4_coded_block_pred(s, i, &coded_val);
                val        = val ^ pred;
                *coded_val = val;
            }
            cbp |= val << (5 - i);
        }
    }

    if (!s->mb_intra) {
        int mx, my;
        wmv2_pred_motion(w, &mx, &my);

        if (cbp) {
            s->dsp.clear_blocks(s->block[0]);
            if (s->per_mb_rl_table) {
                s->rl_table_index        = decode012(&s->gb);
                s->rl_chroma_table_index = s->rl_table_index;
            }

            if (w->abt_flag && w->per_mb_abt) {
                w->per_block_abt = get_bits1(&s->gb);
                if (!w->per_block_abt)
                    w->abt_type = decode012(&s->gb);
            } else
                w->per_block_abt = 0;
        }

        if (wmv2_decode_motion(w, &mx, &my) < 0)
            return -1;

        s->mv_dir      = MV_DIR_FORWARD;
        s->mv_type     = MV_TYPE_16X16;
        s->mv[0][0][0] = mx;
        s->mv[0][0][1] = my;

        for (i = 0; i < 6; i++) {
            if (wmv2_decode_inter_block(w, block[i], i, (cbp >> (5 - i)) & 1) < 0) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "\nerror while decoding inter block: %d x %d (%d)\n",
                       s->mb_x, s->mb_y, i);
                return -1;
            }
        }
    } else {
        if (s->pict_type == AV_PICTURE_TYPE_P)
            av_dlog(s->avctx, "%d%d ", s->inter_intra_pred, cbp);
        av_dlog(s->avctx, "I at %d %d %d %06X\n", s->mb_x, s->mb_y,
                ((cbp & 3) ? 1 : 0) + ((cbp & 0x3C) ? 2 : 0),
                show_bits(&s->gb, 24));
        s->ac_pred = get_bits1(&s->gb);
        if (s->inter_intra_pred) {
            s->h263_aic_dir = get_vlc2(&s->gb, ff_inter_intra_vlc.table,
                                       INTER_INTRA_VLC_BITS, 1);
            av_dlog(s->avctx, "%d%d %d %d/",
                    s->ac_pred, s->h263_aic_dir, s->mb_x, s->mb_y);
        }
        if (s->per_mb_rl_table && cbp) {
            s->rl_table_index        = decode012(&s->gb);
            s->rl_chroma_table_index = s->rl_table_index;
        }

        s->dsp.clear_blocks(s->block[0]);
        for (i = 0; i < 6; i++) {
            if (ff_msmpeg4_decode_block(s, block[i], i, (cbp >> (5 - i)) & 1, NULL) < 0) {
                av_log(s->avctx, AV_LOG_ERROR,
                       "\nerror while decoding intra block: %d x %d (%d)\n",
                       s->mb_x, s->mb_y, i);
                return -1;
            }
        }
    }

    return 0;
}
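
/* Decoder init: set up the shared MSMPEG4 decoder state, then the
 * WMV2-specific tables and the IntraX8 context used for J-type frames. */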
static av_cold int wmv2_decode_init(AVCodecContext *avctx)
{
    Wmv2Context *const w = avctx->priv_data;

    if (ff_msmpeg4_decode_init(avctx) < 0)
        return -1;

    ff_wmv2_common_init(w);

    ff_intrax8_common_init(&w->x8, &w->s);

    return 0;
}

static av_cold int wmv2_decode_end(AVCodecContext *avctx)
{
    Wmv2Context *w = avctx->priv_data;

    ff_intrax8_common_end(&w->x8);
    return ff_h263_decode_end(avctx);
}

AVCodec ff_wmv2_decoder = {
    .name           = "wmv2",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 8"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV2,
    .priv_data_size = sizeof(Wmv2Context),
    .init           = wmv2_decode_init,
    .close          = wmv2_decode_end,
    .decode         = ff_h263_decode_frame,
    .capabilities   = CODEC_CAP_DRAW_HORIZ_BAND | CODEC_CAP_DR1,
    .pix_fmts       = ff_pixfmt_list_420,
};