/*
 * H261 decoder
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 * Copyright (c) 2004 Maarten Daniels
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/**
 * @file h261.c
 * H.261 codec.
 */
#include "common.h"
#include "dsputil.h"
#include "avcodec.h"
#include "mpegvideo.h"
#include "h261data.h"

#define H261_MBA_VLC_BITS 9
#define H261_MTYPE_VLC_BITS 6
#define H261_MV_VLC_BITS 7
#define H261_CBP_VLC_BITS 9
#define TCOEFF_VLC_BITS 9

#define MBA_STUFFING 33
#define MBA_STARTCODE 34
#define IS_FIL(a) ((a)&MB_TYPE_H261_FIL)
/**
 * H261Context
 */
typedef struct H261Context{
    MpegEncContext s;

    int current_mba;
    int previous_mba;
    int mba_diff;
    int mtype;
    int current_mv_x;
    int current_mv_y;
    int gob_number;
    int gob_start_code_skipped; // 1 if gob start code is already read before gob header is read
}H261Context;
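/**
 * Applies the in-loop filter to the current macroblock (the four 8x8 luma
 * blocks and one 8x8 block per chroma plane) when the FIL bit is set in MTYPE.
 */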
void ff_h261_loop_filter(MpegEncContext *s){
    H261Context * h= (H261Context*)s;
    const int linesize  = s->linesize;
    const int uvlinesize= s->uvlinesize;
    uint8_t *dest_y = s->dest[0];
    uint8_t *dest_cb= s->dest[1];
    uint8_t *dest_cr= s->dest[2];

    if(!(IS_FIL (h->mtype)))
        return;

    s->dsp.h261_loop_filter(dest_y                   , linesize);
    s->dsp.h261_loop_filter(dest_y                + 8, linesize);
    s->dsp.h261_loop_filter(dest_y + 8 * linesize    , linesize);
    s->dsp.h261_loop_filter(dest_y + 8 * linesize + 8, linesize);
    s->dsp.h261_loop_filter(dest_cb, uvlinesize);
    s->dsp.h261_loop_filter(dest_cr, uvlinesize);
}
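/**
 * Maps the frame dimensions to the H.261 source format.
 * @return 0 for QCIF (176x144), 1 for CIF (352x288), -1 for unsupported sizes
 */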
static int ff_h261_get_picture_format(int width, int height){
    // QCIF
    if (width == 176 && height == 144)
        return 0;
    // CIF
    else if (width == 352 && height == 288)
        return 1;
    // ERROR
    else
        return -1;
}

static void h261_encode_block(H261Context * h, DCTELEM * block,
                              int n);
static int h261_decode_block(H261Context *h, DCTELEM *block,
                             int n, int coded);
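/**
 * Encodes the H.261 picture header (PSC, temporal reference and PTYPE bits).
 */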
void ff_h261_encode_picture_header(MpegEncContext * s, int picture_number){
    H261Context * h = (H261Context *) s;
    int format, temp_ref;

    align_put_bits(&s->pb);

    /* Update the pointer to last GOB */
    s->ptr_lastgob = pbBufPtr(&s->pb);

    put_bits(&s->pb, 20, 0x10); /* PSC */

    temp_ref= s->picture_number * (int64_t)30000 * s->avctx->time_base.num /
                         (1001 * (int64_t)s->avctx->time_base.den); //FIXME maybe this should use a timestamp
    put_bits(&s->pb, 5, temp_ref & 0x1f); /* TemporalReference */

    put_bits(&s->pb, 1, 0); /* split screen off */
    put_bits(&s->pb, 1, 0); /* camera off */
    put_bits(&s->pb, 1, 0); /* freeze picture release off */

    format = ff_h261_get_picture_format(s->width, s->height);

    put_bits(&s->pb, 1, format); /* 0 == QCIF, 1 == CIF */

    put_bits(&s->pb, 1, 0); /* still image mode */
    put_bits(&s->pb, 1, 0); /* reserved */

    put_bits(&s->pb, 1, 0); /* no PEI */

    if(format == 0)
        h->gob_number = -1;
    else
        h->gob_number = 0;
    h->current_mba = 0;
}
/**
 * Encodes a group of blocks header.
 */
static void h261_encode_gob_header(MpegEncContext * s, int mb_line){
    H261Context * h = (H261Context *)s;
    if(ff_h261_get_picture_format(s->width, s->height) == 0){
        h->gob_number+=2; // QCIF
    }
    else{
        h->gob_number++;  // CIF
    }
    put_bits(&s->pb, 16, 1);            /* GBSC */
    put_bits(&s->pb, 4, h->gob_number); /* GN */
    put_bits(&s->pb, 5, s->qscale);     /* GQUANT */
    put_bits(&s->pb, 1, 0);             /* no GEI */
    h->current_mba = 0;
    h->previous_mba = 0;
    h->current_mv_x=0;
    h->current_mv_y=0;
}
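/**
 * Emits a GOB header every 33 macroblocks and, for CIF, remaps mb_x/mb_y so
 * that macroblocks are visited in GOB order rather than raster order.
 */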
void ff_h261_reorder_mb_index(MpegEncContext* s){
    int index= s->mb_x + s->mb_y*s->mb_width;

    if(index % 33 == 0)
        h261_encode_gob_header(s,0);

    /* for CIF the GOBs are fragmented in the middle of a scanline,
       that's why we need to adjust the x and y index of the macroblocks */
    if(ff_h261_get_picture_format(s->width,s->height) == 1){ // CIF
        s->mb_x =     index % 11 ; index /= 11;
        s->mb_y =     index %  3 ; index /=  3;
        s->mb_x+= 11*(index %  2); index /=  2;
        s->mb_y+=  3*index;

        ff_init_block_index(s);
        ff_update_block_index(s);
    }
}
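/**
 * Encodes one motion vector difference component, wrapping values outside
 * [-16, 15] back into range as required by H.261.
 */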
static void h261_encode_motion(H261Context * h, int val){
    MpegEncContext * const s = &h->s;
    int sign, code;
    if(val==0){
        code = 0;
        put_bits(&s->pb,h261_mv_tab[code][1],h261_mv_tab[code][0]);
    }
    else{
        if(val > 15)
            val -=32;
        if(val < -16)
            val+=32;
        sign = val < 0;
        code = sign ? -val : val;
        put_bits(&s->pb,h261_mv_tab[code][1],h261_mv_tab[code][0]);
        put_bits(&s->pb,1,sign);
    }
}
static inline int get_cbp(MpegEncContext * s,
                          DCTELEM block[6][64])
{
    int i, cbp;
    cbp= 0;
    for (i = 0; i < 6; i++) {
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (5 - i);
    }
    return cbp;
}
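/**
 * Encodes one macroblock: MBA, MTYPE, optional MQUANT and MVD, CBP and the
 * six transform blocks. Inter macroblocks with no coefficients, no motion
 * and no quantizer change are skipped.
 */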
void ff_h261_encode_mb(MpegEncContext * s,
                       DCTELEM block[6][64],
                       int motion_x, int motion_y)
{
    H261Context * h = (H261Context *)s;
    int mvd, mv_diff_x, mv_diff_y, i, cbp;
    cbp = 63; // avoid warning
    mvd = 0;

    h->current_mba++;
    h->mtype = 0;

    if (!s->mb_intra){
        /* compute cbp */
        cbp= get_cbp(s, block);

        /* mvd indicates if this block is motion compensated */
        mvd = motion_x | motion_y;

        if((cbp | mvd | s->dquant ) == 0) {
            /* skip macroblock */
            s->skip_count++;
            h->current_mv_x=0;
            h->current_mv_y=0;
            return;
        }
    }

    /* MB is not skipped, encode MBA */
    put_bits(&s->pb, h261_mba_bits[(h->current_mba-h->previous_mba)-1], h261_mba_code[(h->current_mba-h->previous_mba)-1]);

    /* calculate MTYPE */
    if(!s->mb_intra){
        h->mtype++;

        if(mvd || s->loop_filter)
            h->mtype+=3;
        if(s->loop_filter)
            h->mtype+=3;
        if(cbp || s->dquant)
            h->mtype++;
        assert(h->mtype > 1);
    }

    if(s->dquant)
        h->mtype++;

    put_bits(&s->pb, h261_mtype_bits[h->mtype], h261_mtype_code[h->mtype]);

    h->mtype = h261_mtype_map[h->mtype];

    if(IS_QUANT(h->mtype)){
        ff_set_qscale(s,s->qscale+s->dquant);
        put_bits(&s->pb, 5, s->qscale);
    }

    if(IS_16X16(h->mtype)){
        mv_diff_x = (motion_x >> 1) - h->current_mv_x;
        mv_diff_y = (motion_y >> 1) - h->current_mv_y;
        h->current_mv_x = (motion_x >> 1);
        h->current_mv_y = (motion_y >> 1);
        h261_encode_motion(h,mv_diff_x);
        h261_encode_motion(h,mv_diff_y);
    }

    h->previous_mba = h->current_mba;

    if(HAS_CBP(h->mtype)){
        assert(cbp>0);
        put_bits(&s->pb,h261_cbp_tab[cbp-1][1],h261_cbp_tab[cbp-1][0]);
    }
    for(i=0; i<6; i++) {
        /* encode each block */
        h261_encode_block(h, block[i], i);
    }

    if ( ( h->current_mba == 11 ) || ( h->current_mba == 22 ) || ( h->current_mba == 33 ) || ( !IS_16X16 ( h->mtype ) )){
        h->current_mv_x=0;
        h->current_mv_y=0;
    }
}
void ff_h261_encode_init(MpegEncContext *s){
    static int done = 0;

    if (!done) {
        done = 1;
        init_rl(&h261_rl_tcoeff, 1);
    }

    s->min_qcoeff= -127;
    s->max_qcoeff=  127;
    s->y_dc_scale_table=
    s->c_dc_scale_table= ff_mpeg1_dc_scale_table;
}
/**
 * Encodes an 8x8 block.
 * @param block the 8x8 block
 * @param n block index (0-3 are luma, 4-5 are chroma)
 */
static void h261_encode_block(H261Context * h, DCTELEM * block, int n){
    MpegEncContext * const s = &h->s;
    int level, run, last, i, j, last_index, last_non_zero, sign, slevel, code;
    RLTable *rl;

    rl = &h261_rl_tcoeff;
    if (s->mb_intra) {
        /* DC coef */
        level = block[0];
        /* 255 cannot be represented, so we clamp */
        if (level > 254) {
            level = 254;
            block[0] = 254;
        }
        /* 0 cannot be represented either */
        else if (level < 1) {
            level = 1;
            block[0] = 1;
        }
        if (level == 128)
            put_bits(&s->pb, 8, 0xff);
        else
            put_bits(&s->pb, 8, level);
        i = 1;
    } else if((block[0]==1 || block[0] == -1) && (s->block_last_index[n] > -1)){
        //special case
        put_bits(&s->pb,2,block[0]>0 ? 2 : 3 );
        i = 1;
    } else {
        i = 0;
    }

    /* AC coefs */
    last_index = s->block_last_index[n];
    last_non_zero = i - 1;
    for (; i <= last_index; i++) {
        j = s->intra_scantable.permutated[i];
        level = block[j];
        if (level) {
            run = i - last_non_zero - 1;
            last = (i == last_index);
            sign = 0;
            slevel = level;
            if (level < 0) {
                sign = 1;
                level = -level;
            }
            code = get_rl_index(rl, 0 /*no last in H.261, EOB is used*/, run, level);
            if(run==0 && level < 16)
                code+=1;
            put_bits(&s->pb, rl->table_vlc[code][1], rl->table_vlc[code][0]);
            if (code == rl->n) {
                put_bits(&s->pb, 6, run);
                assert(slevel != 0);
                assert(level <= 127);
                put_bits(&s->pb, 8, slevel & 0xff);
            } else {
                put_bits(&s->pb, 1, sign);
            }
            last_non_zero = i;
        }
    }
    if(last_index > -1){
        put_bits(&s->pb, rl->table_vlc[0][1], rl->table_vlc[0][0]);// END OF BLOCK
    }
}
/***********************************************/
/* decoding */

static VLC h261_mba_vlc;
static VLC h261_mtype_vlc;
static VLC h261_mv_vlc;
static VLC h261_cbp_vlc;

void init_vlc_rl(RLTable *rl, int use_static);
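/**
 * Builds the VLC tables for MBA, MTYPE, MVD, CBP and the transform
 * coefficients (done only once).
 */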
static void h261_decode_init_vlc(H261Context *h){
    static int done = 0;

    if(!done){
        done = 1;
        init_vlc(&h261_mba_vlc, H261_MBA_VLC_BITS, 35,
                 h261_mba_bits, 1, 1,
                 h261_mba_code, 1, 1, 1);
        init_vlc(&h261_mtype_vlc, H261_MTYPE_VLC_BITS, 10,
                 h261_mtype_bits, 1, 1,
                 h261_mtype_code, 1, 1, 1);
        init_vlc(&h261_mv_vlc, H261_MV_VLC_BITS, 17,
                 &h261_mv_tab[0][1], 2, 1,
                 &h261_mv_tab[0][0], 2, 1, 1);
        init_vlc(&h261_cbp_vlc, H261_CBP_VLC_BITS, 63,
                 &h261_cbp_tab[0][1], 2, 1,
                 &h261_cbp_tab[0][0], 2, 1, 1);
        init_rl(&h261_rl_tcoeff, 1);
        init_vlc_rl(&h261_rl_tcoeff, 1);
    }
}
static int h261_decode_init(AVCodecContext *avctx){
    H261Context *h= avctx->priv_data;
    MpegEncContext * const s = &h->s;

    // set defaults
    MPV_decode_defaults(s);
    s->avctx = avctx;

    s->width  = s->avctx->coded_width;
    s->height = s->avctx->coded_height;
    s->codec_id = s->avctx->codec->id;

    s->out_format = FMT_H261;
    s->low_delay= 1;
    avctx->pix_fmt= PIX_FMT_YUV420P;

    s->codec_id= avctx->codec->id;

    h261_decode_init_vlc(h);

    h->gob_start_code_skipped = 0;

    return 0;
}
/**
 * decodes the group of blocks header or slice header.
 * @return <0 if an error occurred
 */
static int h261_decode_gob_header(H261Context *h){
    unsigned int val;
    MpegEncContext * const s = &h->s;

    if ( !h->gob_start_code_skipped ){
        /* Check for GOB Start Code */
        val = show_bits(&s->gb, 15);
        if(val)
            return -1;

        /* We have a GBSC */
        skip_bits(&s->gb, 16);
    }

    h->gob_start_code_skipped = 0;

    h->gob_number = get_bits(&s->gb, 4); /* GN */
    s->qscale = get_bits(&s->gb, 5);     /* GQUANT */

    /* Check if gob_number is valid */
    if (s->mb_height==18){ //cif
        if ((h->gob_number<=0) || (h->gob_number>12))
            return -1;
    }
    else{ //qcif
        if ((h->gob_number!=1) && (h->gob_number!=3) && (h->gob_number!=5))
            return -1;
    }

    /* GEI */
    while (get_bits1(&s->gb) != 0) {
        skip_bits(&s->gb, 8);
    }

    if(s->qscale==0)
        return -1;

    // For the first transmitted macroblock in a GOB, MBA is the absolute address. For
    // subsequent macroblocks, MBA is the difference between the absolute addresses of
    // the macroblock and the last transmitted macroblock.
    h->current_mba = 0;
    h->mba_diff = 0;

    return 0;
}
/**
 * decodes the group of blocks / video packet header.
 * @return <0 if no resync found
 */
static int ff_h261_resync(H261Context *h){
    MpegEncContext * const s = &h->s;
    int left, ret;

    if ( h->gob_start_code_skipped ){
        ret= h261_decode_gob_header(h);
        if(ret>=0)
            return 0;
    }
    else{
        if(show_bits(&s->gb, 15)==0){
            ret= h261_decode_gob_header(h);
            if(ret>=0)
                return 0;
        }
        //OK, it's not where it's supposed to be ...
        s->gb= s->last_resync_gb;
        align_get_bits(&s->gb);
        left= s->gb.size_in_bits - get_bits_count(&s->gb);

        for(;left>15+1+4+5; left-=8){
            if(show_bits(&s->gb, 15)==0){
                GetBitContext bak= s->gb;

                ret= h261_decode_gob_header(h);
                if(ret>=0)
                    return 0;

                s->gb= bak;
            }
            skip_bits(&s->gb, 8);
        }
    }

    return -1;
}
/**
 * decodes skipped macroblocks
 * @return 0
 */
static int h261_decode_mb_skipped(H261Context *h, int mba1, int mba2 )
{
    MpegEncContext * const s = &h->s;
    int i;

    s->mb_intra = 0;

    for(i=mba1; i<mba2; i++){
        int j, xy;

        s->mb_x= ((h->gob_number-1) % 2) * 11 + i % 11;
        s->mb_y= ((h->gob_number-1) / 2) * 3 + i / 11;
        xy = s->mb_x + s->mb_y * s->mb_stride;
        ff_init_block_index(s);
        ff_update_block_index(s);

        for(j=0;j<6;j++)
            s->block_last_index[j] = -1;

        s->mv_dir = MV_DIR_FORWARD;
        s->mv_type = MV_TYPE_16X16;
        s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
        s->mv[0][0][0] = 0;
        s->mv[0][0][1] = 0;
        s->mb_skipped = 1;
        h->mtype &= ~MB_TYPE_H261_FIL;

        MPV_decode_mb(s, s->block);
    }

    return 0;
}
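/**
 * Decodes one motion vector difference component and adds it to the
 * predictor v, wrapping the result back into the valid H.261 range.
 */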
static int decode_mv_component(GetBitContext *gb, int v){
    int mv_diff = get_vlc2(gb, h261_mv_vlc.table, H261_MV_VLC_BITS, 2);

    /* check if mv_diff is valid */
    if ( mv_diff < 0 )
        return v;

    mv_diff = mvmap[mv_diff];

    if(mv_diff && !get_bits1(gb))
        mv_diff= -mv_diff;

    v += mv_diff;
    if     (v <=-16) v+= 32;
    else if(v >= 16) v-= 32;

    return v;
}
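/**
 * Decodes one macroblock: MBA, MTYPE, optional MQUANT, MVD and CBP,
 * followed by the coded blocks.
 * @return SLICE_OK, SLICE_END on a GOB start code, or SLICE_ERROR
 */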
static int h261_decode_mb(H261Context *h){
    MpegEncContext * const s = &h->s;
    int i, cbp, xy;

    cbp = 63;
    // Read mba
    do{
        h->mba_diff = get_vlc2(&s->gb, h261_mba_vlc.table, H261_MBA_VLC_BITS, 2);

        /* Check for slice end */
        /* NOTE: GOB can be empty (no MB data) or consist only of MBA stuffing */
        if (h->mba_diff == MBA_STARTCODE){ // start code
            h->gob_start_code_skipped = 1;
            return SLICE_END;
        }
    }
    while( h->mba_diff == MBA_STUFFING ); // stuffing

    if ( h->mba_diff < 0 ){
        if ( get_bits_count(&s->gb) + 7 >= s->gb.size_in_bits )
            return SLICE_END;

        av_log(s->avctx, AV_LOG_ERROR, "illegal mba at %d %d\n", s->mb_x, s->mb_y);
        return SLICE_ERROR;
    }

    h->mba_diff += 1;
    h->current_mba += h->mba_diff;

    if ( h->current_mba > MBA_STUFFING )
        return SLICE_ERROR;

    s->mb_x= ((h->gob_number-1) % 2) * 11 + ((h->current_mba-1) % 11);
    s->mb_y= ((h->gob_number-1) / 2) * 3 + ((h->current_mba-1) / 11);
    xy = s->mb_x + s->mb_y * s->mb_stride;
    ff_init_block_index(s);
    ff_update_block_index(s);

    // Read mtype
    h->mtype = get_vlc2(&s->gb, h261_mtype_vlc.table, H261_MTYPE_VLC_BITS, 2);
    h->mtype = h261_mtype_map[h->mtype];

    // Read mquant
    if ( IS_QUANT ( h->mtype ) ){
        ff_set_qscale(s, get_bits(&s->gb, 5));
    }

    s->mb_intra = IS_INTRA4x4(h->mtype);

    // Read mv
    if ( IS_16X16 ( h->mtype ) ){
        // Motion vector data is included for all MC macroblocks. MVD is obtained from the macroblock vector by subtracting the
        // vector of the preceding macroblock. For this calculation the vector of the preceding macroblock is regarded as zero in the
        // following three situations:
        // 1) evaluating MVD for macroblocks 1, 12 and 23;
        // 2) evaluating MVD for macroblocks in which MBA does not represent a difference of 1;
        // 3) MTYPE of the previous macroblock was not MC.
        if ( ( h->current_mba == 1 ) || ( h->current_mba == 12 ) || ( h->current_mba == 23 ) ||
             ( h->mba_diff != 1))
        {
            h->current_mv_x = 0;
            h->current_mv_y = 0;
        }

        h->current_mv_x= decode_mv_component(&s->gb, h->current_mv_x);
        h->current_mv_y= decode_mv_component(&s->gb, h->current_mv_y);
    }else{
        h->current_mv_x = 0;
        h->current_mv_y = 0;
    }

    // Read cbp
    if ( HAS_CBP( h->mtype ) ){
        cbp = get_vlc2(&s->gb, h261_cbp_vlc.table, H261_CBP_VLC_BITS, 2) + 1;
    }

    if(s->mb_intra){
        s->current_picture.mb_type[xy]= MB_TYPE_INTRA;
        goto intra;
    }

    //set motion vectors
    s->mv_dir = MV_DIR_FORWARD;
    s->mv_type = MV_TYPE_16X16;
    s->current_picture.mb_type[xy]= MB_TYPE_16x16 | MB_TYPE_L0;
    s->mv[0][0][0] = h->current_mv_x * 2;//gets divided by 2 in motion compensation
    s->mv[0][0][1] = h->current_mv_y * 2;

intra:
    /* decode each block */
    if(s->mb_intra || HAS_CBP(h->mtype)){
        s->dsp.clear_blocks(s->block[0]);
        for (i = 0; i < 6; i++) {
            if (h261_decode_block(h, s->block[i], i, cbp&32) < 0){
                return SLICE_ERROR;
            }
            cbp+=cbp;
        }
    }else{
        for (i = 0; i < 6; i++)
            s->block_last_index[i]= -1;
    }

    MPV_decode_mb(s, s->block);

    return SLICE_OK;
}
  566. /**
  567. * decodes a macroblock
  568. * @return <0 if an error occured
  569. */
  570. static int h261_decode_block(H261Context * h, DCTELEM * block,
  571. int n, int coded)
  572. {
  573. MpegEncContext * const s = &h->s;
  574. int code, level, i, j, run;
  575. RLTable *rl = &h261_rl_tcoeff;
  576. const uint8_t *scan_table;
  577. // For the variable length encoding there are two code tables, one being used for
  578. // the first transmitted LEVEL in INTER, INTER+MC and INTER+MC+FIL blocks, the second
  579. // for all other LEVELs except the first one in INTRA blocks which is fixed length
  580. // coded with 8 bits.
  581. // NOTE: the two code tables only differ in one VLC so we handle that manually.
  582. scan_table = s->intra_scantable.permutated;
  583. if (s->mb_intra){
  584. /* DC coef */
  585. level = get_bits(&s->gb, 8);
  586. // 0 (00000000b) and -128 (10000000b) are FORBIDDEN
  587. if((level&0x7F) == 0){
  588. av_log(s->avctx, AV_LOG_ERROR, "illegal dc %d at %d %d\n", level, s->mb_x, s->mb_y);
  589. return -1;
  590. }
  591. // The code 1000 0000 is not used, the reconstruction level of 1024 being coded as 1111 1111.
  592. if (level == 255)
  593. level = 128;
  594. block[0] = level;
  595. i = 1;
  596. }else if(coded){
  597. // Run Level Code
  598. // EOB Not possible for first level when cbp is available (that's why the table is different)
  599. // 0 1 1s
  600. // * * 0*
  601. int check = show_bits(&s->gb, 2);
  602. i = 0;
  603. if ( check & 0x2 ){
  604. skip_bits(&s->gb, 2);
  605. block[0] = ( check & 0x1 ) ? -1 : 1;
  606. i = 1;
  607. }
  608. }else{
  609. i = 0;
  610. }
  611. if(!coded){
  612. s->block_last_index[n] = i - 1;
  613. return 0;
  614. }
  615. for(;;){
  616. code = get_vlc2(&s->gb, rl->vlc.table, TCOEFF_VLC_BITS, 2);
  617. if (code < 0){
  618. av_log(s->avctx, AV_LOG_ERROR, "illegal ac vlc code at %dx%d\n", s->mb_x, s->mb_y);
  619. return -1;
  620. }
  621. if (code == rl->n) {
  622. /* escape */
  623. // The remaining combinations of (run, level) are encoded with a 20-bit word consisting of 6 bits escape, 6 bits run and 8 bits level.
  624. run = get_bits(&s->gb, 6);
  625. level = get_sbits(&s->gb, 8);
  626. }else if(code == 0){
  627. break;
  628. }else{
  629. run = rl->table_run[code];
  630. level = rl->table_level[code];
  631. if (get_bits1(&s->gb))
  632. level = -level;
  633. }
  634. i += run;
  635. if (i >= 64){
  636. av_log(s->avctx, AV_LOG_ERROR, "run overflow at %dx%d\n", s->mb_x, s->mb_y);
  637. return -1;
  638. }
  639. j = scan_table[i];
  640. block[j] = level;
  641. i++;
  642. }
  643. s->block_last_index[n] = i-1;
  644. return 0;
  645. }
  646. /**
  647. * decodes the H261 picture header.
  648. * @return <0 if no startcode found
  649. */
  650. static int h261_decode_picture_header(H261Context *h){
  651. MpegEncContext * const s = &h->s;
  652. int format, i;
  653. uint32_t startcode= 0;
  654. for(i= s->gb.size_in_bits - get_bits_count(&s->gb); i>24; i-=1){
  655. startcode = ((startcode << 1) | get_bits(&s->gb, 1)) & 0x000FFFFF;
  656. if(startcode == 0x10)
  657. break;
  658. }
  659. if (startcode != 0x10){
  660. av_log(s->avctx, AV_LOG_ERROR, "Bad picture start code\n");
  661. return -1;
  662. }
  663. /* temporal reference */
  664. s->picture_number = get_bits(&s->gb, 5); /* picture timestamp */
  665. /* PTYPE starts here */
  666. skip_bits1(&s->gb); /* split screen off */
  667. skip_bits1(&s->gb); /* camera off */
  668. skip_bits1(&s->gb); /* freeze picture release off */
  669. format = get_bits1(&s->gb);
  670. //only 2 formats possible
  671. if (format == 0){//QCIF
  672. s->width = 176;
  673. s->height = 144;
  674. s->mb_width = 11;
  675. s->mb_height = 9;
  676. }else{//CIF
  677. s->width = 352;
  678. s->height = 288;
  679. s->mb_width = 22;
  680. s->mb_height = 18;
  681. }
  682. s->mb_num = s->mb_width * s->mb_height;
  683. skip_bits1(&s->gb); /* still image mode off */
  684. skip_bits1(&s->gb); /* Reserved */
  685. /* PEI */
  686. while (get_bits1(&s->gb) != 0){
  687. skip_bits(&s->gb, 8);
  688. }
  689. // h261 has no I-FRAMES, but if we pass I_TYPE for the first frame, the codec crashes if it does
  690. // not contain all I-blocks (e.g. when a packet is lost)
  691. s->pict_type = P_TYPE;
  692. h->gob_number = 0;
  693. return 0;
  694. }
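/**
 * Decodes all macroblocks of one group of blocks; macroblocks skipped via
 * the MBA differences are reconstructed as not-coded.
 */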
static int h261_decode_gob(H261Context *h){
    MpegEncContext * const s = &h->s;

    ff_set_qscale(s, s->qscale);

    /* decode mb's */
    while(h->current_mba <= MBA_STUFFING)
    {
        int ret;
        /* DCT & quantize */
        ret= h261_decode_mb(h);
        if(ret<0){
            if(ret==SLICE_END){
                h261_decode_mb_skipped(h, h->current_mba, 33);
                return 0;
            }
            av_log(s->avctx, AV_LOG_ERROR, "Error at MB: %d\n", s->mb_x + s->mb_y*s->mb_stride);
            return -1;
        }

        h261_decode_mb_skipped(h, h->current_mba-h->mba_diff, h->current_mba-1);
    }

    return -1;
}
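/**
 * Scans the buffer for the next picture start code (the 20-bit value 0x00010,
 * which is not necessarily byte aligned) to locate the end of the current frame.
 * @return the offset of the frame end, or END_NOT_FOUND
 */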
static int h261_find_frame_end(ParseContext *pc, AVCodecContext* avctx, const uint8_t *buf, int buf_size){
    int vop_found, i, j;
    uint32_t state;

    vop_found= pc->frame_start_found;
    state= pc->state;

    for(i=0; i<buf_size && !vop_found; i++){
        state= (state<<8) | buf[i];
        for(j=0; j<8; j++){
            if(((state>>j)&0xFFFFF) == 0x00010){
                i++;
                vop_found=1;
                break;
            }
        }
    }
    if(vop_found){
        for(; i<buf_size; i++){
            state= (state<<8) | buf[i];
            for(j=0; j<8; j++){
                if(((state>>j)&0xFFFFF) == 0x00010){
                    pc->frame_start_found=0;
                    pc->state= state>>(2*8);
                    return i-1;
                }
            }
        }
    }

    pc->frame_start_found= vop_found;
    pc->state= state;
    return END_NOT_FOUND;
}
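/**
 * Parser callback: splits the input into complete frames using
 * h261_find_frame_end() and ff_combine_frame().
 */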
static int h261_parse(AVCodecParserContext *s,
                      AVCodecContext *avctx,
                      uint8_t **poutbuf, int *poutbuf_size,
                      const uint8_t *buf, int buf_size)
{
    ParseContext *pc = s->priv_data;
    int next;

    next= h261_find_frame_end(pc,avctx, buf, buf_size);
    if (ff_combine_frame(pc, next, (uint8_t **)&buf, &buf_size) < 0) {
        *poutbuf = NULL;
        *poutbuf_size = 0;
        return buf_size;
    }
    *poutbuf = (uint8_t *)buf;
    *poutbuf_size = buf_size;
    return next;
}
/**
 * returns the number of bytes consumed for building the current frame
 */
static int get_consumed_bytes(MpegEncContext *s, int buf_size){
    int pos= get_bits_count(&s->gb)>>3;

    if(pos==0) pos=1; //avoid infinite loops (I doubt that's needed but ...)
    if(pos+10>buf_size) pos=buf_size; // oops ;)

    return pos;
}
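/**
 * Decodes one complete H.261 frame from the given buffer.
 * @return the number of bytes consumed, or -1 on error
 */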
static int h261_decode_frame(AVCodecContext *avctx,
                             void *data, int *data_size,
                             uint8_t *buf, int buf_size)
{
    H261Context *h= avctx->priv_data;
    MpegEncContext *s = &h->s;
    int ret;
    AVFrame *pict = data;

#ifdef DEBUG
    av_log(avctx, AV_LOG_DEBUG, "*****frame %d size=%d\n", avctx->frame_number, buf_size);
    av_log(avctx, AV_LOG_DEBUG, "bytes=%x %x %x %x\n", buf[0], buf[1], buf[2], buf[3]);
#endif
    s->flags= avctx->flags;
    s->flags2= avctx->flags2;

    h->gob_start_code_skipped=0;

retry:

    init_get_bits(&s->gb, buf, buf_size*8);

    if(!s->context_initialized){
        if (MPV_common_init(s) < 0) //we need the idct permutation for reading a custom matrix
            return -1;
    }

    //we need to set current_picture_ptr before reading the header, otherwise we can't store anything in there
    if(s->current_picture_ptr==NULL || s->current_picture_ptr->data[0]){
        int i= ff_find_unused_picture(s, 0);
        s->current_picture_ptr= &s->picture[i];
    }

    ret = h261_decode_picture_header(h);

    /* skip if the header was thrashed */
    if (ret < 0){
        av_log(s->avctx, AV_LOG_ERROR, "header damaged\n");
        return -1;
    }

    if (s->width != avctx->coded_width || s->height != avctx->coded_height){
        ParseContext pc= s->parse_context; //FIXME move this demuxing hack to avformat
        s->parse_context.buffer=0;
        MPV_common_end(s);
        s->parse_context= pc;
    }
    if (!s->context_initialized) {
        avcodec_set_dimensions(avctx, s->width, s->height);

        goto retry;
    }

    // for hurry_up==5
    s->current_picture.pict_type= s->pict_type;
    s->current_picture.key_frame= s->pict_type == I_TYPE;

    /* skip everything if we are in a hurry>=5 */
    if(avctx->hurry_up>=5) return get_consumed_bytes(s, buf_size);
    if(  (avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type==B_TYPE)
       ||(avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type!=I_TYPE)
       ||  avctx->skip_frame >= AVDISCARD_ALL)
        return get_consumed_bytes(s, buf_size);

    if(MPV_frame_start(s, avctx) < 0)
        return -1;

    ff_er_frame_start(s);

    /* decode each macroblock */
    s->mb_x=0;
    s->mb_y=0;

    while(h->gob_number < (s->mb_height==18 ? 12 : 5)){
        if(ff_h261_resync(h)<0)
            break;
        h261_decode_gob(h);
    }
    MPV_frame_end(s);

    assert(s->current_picture.pict_type == s->current_picture_ptr->pict_type);
    assert(s->current_picture.pict_type == s->pict_type);
    *pict= *(AVFrame*)s->current_picture_ptr;
    ff_print_debug_info(s, pict);

    /* Return the Picture timestamp as the frame number */
    /* we subtract 1 because it is added in utils.c */
    avctx->frame_number = s->picture_number - 1;

    *data_size = sizeof(AVFrame);

    return get_consumed_bytes(s, buf_size);
}
static int h261_decode_end(AVCodecContext *avctx)
{
    H261Context *h= avctx->priv_data;
    MpegEncContext *s = &h->s;

    MPV_common_end(s);
    return 0;
}
#ifdef CONFIG_ENCODERS
AVCodec h261_encoder = {
    "h261",
    CODEC_TYPE_VIDEO,
    CODEC_ID_H261,
    sizeof(H261Context),
    MPV_encode_init,
    MPV_encode_picture,
    MPV_encode_end,
};
#endif
AVCodec h261_decoder = {
    "h261",
    CODEC_TYPE_VIDEO,
    CODEC_ID_H261,
    sizeof(H261Context),
    h261_decode_init,
    NULL,
    h261_decode_end,
    h261_decode_frame,
    CODEC_CAP_DR1,
};
AVCodecParser h261_parser = {
    { CODEC_ID_H261 },
    sizeof(ParseContext),
    NULL,
    h261_parse,
    ff_parse_close,
};