/*
 * H261 decoder
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 * Copyright (c) 2004 Maarten Daniels
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/**
 * @file h261.c
 * H.261 codec.
 */
#include "common.h"
#include "dsputil.h"
#include "avcodec.h"
#include "mpegvideo.h"
#include "h261data.h"

#define H261_MBA_VLC_BITS 9
#define H261_MTYPE_VLC_BITS 6
#define H261_MV_VLC_BITS 7
#define H261_CBP_VLC_BITS 9
#define TCOEFF_VLC_BITS 9

#define MBA_STUFFING 33
#define MBA_STARTCODE 34
#define IS_FIL(a)    ((a)&MB_TYPE_H261_FIL)
/**
 * H261Context
 */
typedef struct H261Context{
    MpegEncContext s;

    int current_mba;
    int previous_mba;
    int mba_diff;
    int mtype;
    int current_mv_x;
    int current_mv_y;
    int gob_number;
    int gob_start_code_skipped; // 1 if gob start code is already read before gob header is read
}H261Context;
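/**
 * Applies the H.261 in-loop filter to the current macroblock (the four
 * luma 8x8 blocks and both chroma blocks) when the FIL bit is set in MTYPE.
 */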
void ff_h261_loop_filter(MpegEncContext *s){
    H261Context * h= (H261Context*)s;
    const int linesize  = s->linesize;
    const int uvlinesize= s->uvlinesize;
    uint8_t *dest_y = s->dest[0];
    uint8_t *dest_cb= s->dest[1];
    uint8_t *dest_cr= s->dest[2];

    if(!(IS_FIL (h->mtype)))
        return;

    s->dsp.h261_loop_filter(dest_y                   , linesize);
    s->dsp.h261_loop_filter(dest_y                + 8, linesize);
    s->dsp.h261_loop_filter(dest_y + 8 * linesize    , linesize);
    s->dsp.h261_loop_filter(dest_y + 8 * linesize + 8, linesize);
    s->dsp.h261_loop_filter(dest_cb, uvlinesize);
    s->dsp.h261_loop_filter(dest_cr, uvlinesize);
}
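/**
 * Maps frame dimensions to the H.261 picture format.
 * @return 0 for QCIF (176x144), 1 for CIF (352x288), -1 otherwise
 */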
int ff_h261_get_picture_format(int width, int height){
    // QCIF
    if (width == 176 && height == 144)
        return 0;
    // CIF
    else if (width == 352 && height == 288)
        return 1;
    // ERROR
    else
        return -1;
}
static void h261_encode_block(H261Context * h, DCTELEM * block,
                              int n);
static int h261_decode_block(H261Context *h, DCTELEM *block,
                             int n, int coded);
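/**
 * Writes the H.261 picture header (PSC, temporal reference and the PTYPE
 * bits) and resets the per-picture GOB and macroblock address state.
 */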
void ff_h261_encode_picture_header(MpegEncContext * s, int picture_number){
    H261Context * h = (H261Context *) s;
    int format, temp_ref;

    align_put_bits(&s->pb);

    /* Update the pointer to last GOB */
    s->ptr_lastgob = pbBufPtr(&s->pb);

    put_bits(&s->pb, 20, 0x10); /* PSC */

    temp_ref= s->picture_number * (int64_t)30000 * s->avctx->time_base.num /
                         (1001 * (int64_t)s->avctx->time_base.den); //FIXME maybe this should use a timestamp
    put_bits(&s->pb, 5, temp_ref & 0x1f); /* TemporalReference */

    put_bits(&s->pb, 1, 0); /* split screen off */
    put_bits(&s->pb, 1, 0); /* camera off */
    put_bits(&s->pb, 1, 0); /* freeze picture release off */

    format = ff_h261_get_picture_format(s->width, s->height);

    put_bits(&s->pb, 1, format); /* 0 == QCIF, 1 == CIF */

    put_bits(&s->pb, 1, 0); /* still image mode */
    put_bits(&s->pb, 1, 0); /* reserved */

    put_bits(&s->pb, 1, 0); /* no PEI */

    if(format == 0)
        h->gob_number = -1;
    else
        h->gob_number = 0;

    h->current_mba = 0;
}
/**
 * Encodes a group of blocks header.
 */
static void h261_encode_gob_header(MpegEncContext * s, int mb_line){
    H261Context * h = (H261Context *)s;
    if(ff_h261_get_picture_format(s->width, s->height) == 0){
        h->gob_number+=2; // QCIF
    }
    else{
        h->gob_number++; // CIF
    }
    put_bits(&s->pb, 16, 1);            /* GBSC */
    put_bits(&s->pb, 4, h->gob_number); /* GN */
    put_bits(&s->pb, 5, s->qscale);     /* GQUANT */
    put_bits(&s->pb, 1, 0);             /* no GEI */
    h->current_mba = 0;
    h->previous_mba = 0;
    h->current_mv_x=0;
    h->current_mv_y=0;
}
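/**
 * Emits a GOB header every 33 macroblocks and remaps the raster-scan
 * macroblock index to H.261 GOB order; for CIF two GOBs share each
 * macroblock row, so mb_x/mb_y are recomputed here.
 */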
void ff_h261_reorder_mb_index(MpegEncContext* s){
    int index= s->mb_x + s->mb_y*s->mb_width;

    if(index % 33 == 0)
        h261_encode_gob_header(s,0);

    /* for CIF the GOB's are fragmented in the middle of a scanline
       that's why we need to adjust the x and y index of the macroblocks */
    if(ff_h261_get_picture_format(s->width,s->height) == 1){ // CIF
        s->mb_x =     index % 11 ; index /= 11;
        s->mb_y =     index %  3 ; index /=  3;
        s->mb_x+= 11*(index %  2); index /=  2;
        s->mb_y+=  3* index;

        ff_init_block_index(s);
        ff_update_block_index(s);
    }
}
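/**
 * Encodes one motion vector difference component as an H.261 MVD VLC,
 * wrapping the value into the [-16, 15] range before coding.
 */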
static void h261_encode_motion(H261Context * h, int val){
    MpegEncContext * const s = &h->s;
    int sign, code;
    if(val==0){
        code = 0;
        put_bits(&s->pb,h261_mv_tab[code][1],h261_mv_tab[code][0]);
    }
    else{
        if(val > 15)
            val -=32;
        if(val < -16)
            val+=32;
        sign = val < 0;
        code = sign ? -val : val;
        put_bits(&s->pb,h261_mv_tab[code][1],h261_mv_tab[code][0]);
        put_bits(&s->pb,1,sign);
    }
}
static inline int get_cbp(MpegEncContext * s,
                          DCTELEM block[6][64])
{
    int i, cbp;
    cbp= 0;
    for (i = 0; i < 6; i++) {
        if (s->block_last_index[i] >= 0)
            cbp |= 1 << (5 - i);
    }
    return cbp;
}
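/**
 * Encodes one macroblock: MBA, MTYPE, optional MQUANT and MVD, CBP and
 * the six transform blocks. Inter macroblocks with no coefficients, no
 * motion and no quantizer change are skipped entirely.
 */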
void ff_h261_encode_mb(MpegEncContext * s,
                       DCTELEM block[6][64],
                       int motion_x, int motion_y)
{
    H261Context * h = (H261Context *)s;
    int mvd, mv_diff_x, mv_diff_y, i, cbp;
    cbp = 63; // avoid warning
    mvd = 0;

    h->current_mba++;
    h->mtype = 0;

    if (!s->mb_intra){
        /* compute cbp */
        cbp= get_cbp(s, block);

        /* mvd indicates if this block is motion compensated */
        mvd = motion_x | motion_y;

        if((cbp | mvd | s->dquant ) == 0) {
            /* skip macroblock */
            s->skip_count++;
            h->current_mv_x=0;
            h->current_mv_y=0;
            return;
        }
    }

    /* MB is not skipped, encode MBA */
    put_bits(&s->pb, h261_mba_bits[(h->current_mba-h->previous_mba)-1], h261_mba_code[(h->current_mba-h->previous_mba)-1]);

    /* calculate MTYPE */
    if(!s->mb_intra){
        h->mtype++;

        if(mvd || s->loop_filter)
            h->mtype+=3;
        if(s->loop_filter)
            h->mtype+=3;
        if(cbp || s->dquant)
            h->mtype++;
        assert(h->mtype > 1);
    }

    if(s->dquant)
        h->mtype++;

    put_bits(&s->pb, h261_mtype_bits[h->mtype], h261_mtype_code[h->mtype]);

    h->mtype = h261_mtype_map[h->mtype];

    if(IS_QUANT(h->mtype)){
        ff_set_qscale(s,s->qscale+s->dquant);
        put_bits(&s->pb, 5, s->qscale);
    }

    if(IS_16X16(h->mtype)){
        mv_diff_x = (motion_x >> 1) - h->current_mv_x;
        mv_diff_y = (motion_y >> 1) - h->current_mv_y;
        h->current_mv_x = (motion_x >> 1);
        h->current_mv_y = (motion_y >> 1);
        h261_encode_motion(h,mv_diff_x);
        h261_encode_motion(h,mv_diff_y);
    }

    h->previous_mba = h->current_mba;

    if(HAS_CBP(h->mtype)){
        assert(cbp>0);
        put_bits(&s->pb,h261_cbp_tab[cbp-1][1],h261_cbp_tab[cbp-1][0]);
    }
    for(i=0; i<6; i++) {
        /* encode each block */
        h261_encode_block(h, block[i], i);
    }

    if ( ( h->current_mba == 11 ) || ( h->current_mba == 22 ) || ( h->current_mba == 33 ) || ( !IS_16X16 ( h->mtype ) )){
        h->current_mv_x=0;
        h->current_mv_y=0;
    }
}
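/**
 * One-time encoder setup: initializes the TCOEFF run-level table and sets
 * the coefficient limits and the DC scale tables shared with MPEG-1.
 */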
void ff_h261_encode_init(MpegEncContext *s){
    static int done = 0;

    if (!done) {
        done = 1;
        init_rl(&h261_rl_tcoeff, 1);
    }

    s->min_qcoeff= -127;
    s->max_qcoeff=  127;
    s->y_dc_scale_table=
    s->c_dc_scale_table= ff_mpeg1_dc_scale_table;
}
/**
 * Encodes an 8x8 block.
 * @param block the 8x8 block
 * @param n block index (0-3 are luma, 4-5 are chroma)
 */
static void h261_encode_block(H261Context * h, DCTELEM * block, int n){
    MpegEncContext * const s = &h->s;
    int level, run, last, i, j, last_index, last_non_zero, sign, slevel, code;
    RLTable *rl;

    rl = &h261_rl_tcoeff;
    if (s->mb_intra) {
        /* DC coef */
        level = block[0];
        /* 255 cannot be represented, so we clamp */
        if (level > 254) {
            level = 254;
            block[0] = 254;
        }
        /* 0 cannot be represented either */
        else if (level < 1) {
            level = 1;
            block[0] = 1;
        }
        if (level == 128)
            put_bits(&s->pb, 8, 0xff);
        else
            put_bits(&s->pb, 8, level);
        i = 1;
    } else if((block[0]==1 || block[0] == -1) && (s->block_last_index[n] > -1)){
        //special case
        put_bits(&s->pb,2,block[0]>0 ? 2 : 3 );
        i = 1;
    } else {
        i = 0;
    }

    /* AC coefs */
    last_index = s->block_last_index[n];
    last_non_zero = i - 1;
    for (; i <= last_index; i++) {
        j = s->intra_scantable.permutated[i];
        level = block[j];
        if (level) {
            run = i - last_non_zero - 1;
            last = (i == last_index);
            sign = 0;
            slevel = level;
            if (level < 0) {
                sign = 1;
                level = -level;
            }
            code = get_rl_index(rl, 0 /*no last in H.261, EOB is used*/, run, level);
            if(run==0 && level < 16)
                code+=1;
            put_bits(&s->pb, rl->table_vlc[code][1], rl->table_vlc[code][0]);
            if (code == rl->n) {
                put_bits(&s->pb, 6, run);
                assert(slevel != 0);
                assert(level <= 127);
                put_bits(&s->pb, 8, slevel & 0xff);
            } else {
                put_bits(&s->pb, 1, sign);
            }
            last_non_zero = i;
        }
    }
    if(last_index > -1){
        put_bits(&s->pb, rl->table_vlc[0][1], rl->table_vlc[0][0]); // END OF BLOCK
    }
}
/***********************************************/
/* decoding */

static VLC h261_mba_vlc;
static VLC h261_mtype_vlc;
static VLC h261_mv_vlc;
static VLC h261_cbp_vlc;

static void h261_decode_init_vlc(H261Context *h){
    static int done = 0;

    if(!done){
        done = 1;
        init_vlc(&h261_mba_vlc, H261_MBA_VLC_BITS, 35,
                 h261_mba_bits, 1, 1,
                 h261_mba_code, 1, 1, 1);
        init_vlc(&h261_mtype_vlc, H261_MTYPE_VLC_BITS, 10,
                 h261_mtype_bits, 1, 1,
                 h261_mtype_code, 1, 1, 1);
        init_vlc(&h261_mv_vlc, H261_MV_VLC_BITS, 17,
                 &h261_mv_tab[0][1], 2, 1,
                 &h261_mv_tab[0][0], 2, 1, 1);
        init_vlc(&h261_cbp_vlc, H261_CBP_VLC_BITS, 63,
                 &h261_cbp_tab[0][1], 2, 1,
                 &h261_cbp_tab[0][0], 2, 1, 1);
        init_rl(&h261_rl_tcoeff, 1);
        init_vlc_rl(&h261_rl_tcoeff, 1);
    }
}
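/**
 * Initializes the decoder context: MPEG video defaults, coded dimensions,
 * output pixel format and the H.261 VLC tables.
 */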
static int h261_decode_init(AVCodecContext *avctx){
    H261Context *h= avctx->priv_data;
    MpegEncContext * const s = &h->s;

    // set defaults
    MPV_decode_defaults(s);
    s->avctx = avctx;

    s->width  = s->avctx->coded_width;
    s->height = s->avctx->coded_height;
    s->codec_id = s->avctx->codec->id;

    s->out_format = FMT_H261;
    s->low_delay= 1;
    avctx->pix_fmt= PIX_FMT_YUV420P;

    s->codec_id= avctx->codec->id;

    h261_decode_init_vlc(h);

    h->gob_start_code_skipped = 0;

    return 0;
}
/**
 * Decodes the group of blocks header or slice header.
 * @return <0 if an error occurred
 */
static int h261_decode_gob_header(H261Context *h){
    unsigned int val;
    MpegEncContext * const s = &h->s;

    if ( !h->gob_start_code_skipped ){
        /* Check for GOB Start Code */
        val = show_bits(&s->gb, 15);
        if(val)
            return -1;

        /* We have a GBSC */
        skip_bits(&s->gb, 16);
    }

    h->gob_start_code_skipped = 0;

    h->gob_number = get_bits(&s->gb, 4); /* GN */
    s->qscale = get_bits(&s->gb, 5);     /* GQUANT */

    /* Check if gob_number is valid */
    if (s->mb_height==18){ //cif
        if ((h->gob_number<=0) || (h->gob_number>12))
            return -1;
    }
    else{ //qcif
        if ((h->gob_number!=1) && (h->gob_number!=3) && (h->gob_number!=5))
            return -1;
    }

    /* GEI */
    while (get_bits1(&s->gb) != 0) {
        skip_bits(&s->gb, 8);
    }

    if(s->qscale==0)
        return -1;

    // For the first transmitted macroblock in a GOB, MBA is the absolute address. For
    // subsequent macroblocks, MBA is the difference between the absolute addresses of
    // the macroblock and the last transmitted macroblock.
    h->current_mba = 0;
    h->mba_diff = 0;

    return 0;
}
/**
 * Decodes the group of blocks / video packet header.
 * @return <0 if no resync found
 */
static int ff_h261_resync(H261Context *h){
    MpegEncContext * const s = &h->s;
    int left, ret;

    if ( h->gob_start_code_skipped ){
        ret= h261_decode_gob_header(h);
        if(ret>=0)
            return 0;
    }
    else{
        if(show_bits(&s->gb, 15)==0){
            ret= h261_decode_gob_header(h);
            if(ret>=0)
                return 0;
        }
        //OK, it's not where it's supposed to be ...
        s->gb= s->last_resync_gb;
        align_get_bits(&s->gb);
        left= s->gb.size_in_bits - get_bits_count(&s->gb);

        for(;left>15+1+4+5; left-=8){
            if(show_bits(&s->gb, 15)==0){
                GetBitContext bak= s->gb;

                ret= h261_decode_gob_header(h);
                if(ret>=0)
                    return 0;

                s->gb= bak;
            }
            skip_bits(&s->gb, 8);
        }
    }

    return -1;
}
/**
 * Decodes skipped macroblocks.
 * @return 0
 */
static int h261_decode_mb_skipped(H261Context *h, int mba1, int mba2 )
{
    MpegEncContext * const s = &h->s;
    int i;

    s->mb_intra = 0;

    for(i=mba1; i<mba2; i++){
        int j, xy;

        s->mb_x= ((h->gob_number-1) % 2) * 11 + i % 11;
        s->mb_y= ((h->gob_number-1) / 2) * 3 + i / 11;
        xy = s->mb_x + s->mb_y * s->mb_stride;
        ff_init_block_index(s);
        ff_update_block_index(s);

        for(j=0;j<6;j++)
            s->block_last_index[j] = -1;

        s->mv_dir = MV_DIR_FORWARD;
        s->mv_type = MV_TYPE_16X16;
        s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
        s->mv[0][0][0] = 0;
        s->mv[0][0][1] = 0;
        s->mb_skipped = 1;
        h->mtype &= ~MB_TYPE_H261_FIL;

        MPV_decode_mb(s, s->block);
    }

    return 0;
}
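/**
 * Decodes one MVD component and adds it to the predictor v, wrapping the
 * result back into the valid H.261 vector range. On an invalid VLC the
 * predictor is returned unchanged.
 */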
static int decode_mv_component(GetBitContext *gb, int v){
    int mv_diff = get_vlc2(gb, h261_mv_vlc.table, H261_MV_VLC_BITS, 2);

    /* check if mv_diff is valid */
    if ( mv_diff < 0 )
        return v;

    mv_diff = mvmap[mv_diff];

    if(mv_diff && !get_bits1(gb))
        mv_diff= -mv_diff;

    v += mv_diff;
    if     (v <=-16) v+= 32;
    else if(v >= 16) v-= 32;

    return v;
}
static int h261_decode_mb(H261Context *h){
    MpegEncContext * const s = &h->s;
    int i, cbp, xy;

    cbp = 63;
    // Read mba
    do{
        h->mba_diff = get_vlc2(&s->gb, h261_mba_vlc.table, H261_MBA_VLC_BITS, 2);

        /* Check for slice end */
        /* NOTE: GOB can be empty (no MB data) or exist only of MBA_stuffing */
        if (h->mba_diff == MBA_STARTCODE){ // start code
            h->gob_start_code_skipped = 1;
            return SLICE_END;
        }
    }
    while( h->mba_diff == MBA_STUFFING ); // stuffing

    if ( h->mba_diff < 0 ){
        if ( get_bits_count(&s->gb) + 7 >= s->gb.size_in_bits )
            return SLICE_END;

        av_log(s->avctx, AV_LOG_ERROR, "illegal mba at %d %d\n", s->mb_x, s->mb_y);
        return SLICE_ERROR;
    }

    h->mba_diff += 1;
    h->current_mba += h->mba_diff;

    if ( h->current_mba > MBA_STUFFING )
        return SLICE_ERROR;

    s->mb_x= ((h->gob_number-1) % 2) * 11 + ((h->current_mba-1) % 11);
    s->mb_y= ((h->gob_number-1) / 2) * 3 + ((h->current_mba-1) / 11);
    xy = s->mb_x + s->mb_y * s->mb_stride;
    ff_init_block_index(s);
    ff_update_block_index(s);

    // Read mtype
    h->mtype = get_vlc2(&s->gb, h261_mtype_vlc.table, H261_MTYPE_VLC_BITS, 2);
    h->mtype = h261_mtype_map[h->mtype];

    // Read mquant
    if ( IS_QUANT ( h->mtype ) ){
        ff_set_qscale(s, get_bits(&s->gb, 5));
    }

    s->mb_intra = IS_INTRA4x4(h->mtype);

    // Read mv
    if ( IS_16X16 ( h->mtype ) ){
        // Motion vector data is included for all MC macroblocks. MVD is obtained from the macroblock vector by subtracting the
        // vector of the preceding macroblock. For this calculation the vector of the preceding macroblock is regarded as zero in the
        // following three situations:
        // 1) evaluating MVD for macroblocks 1, 12 and 23;
        // 2) evaluating MVD for macroblocks in which MBA does not represent a difference of 1;
        // 3) MTYPE of the previous macroblock was not MC.
        if ( ( h->current_mba == 1 ) || ( h->current_mba == 12 ) || ( h->current_mba == 23 ) ||
             ( h->mba_diff != 1))
        {
            h->current_mv_x = 0;
            h->current_mv_y = 0;
        }

        h->current_mv_x= decode_mv_component(&s->gb, h->current_mv_x);
        h->current_mv_y= decode_mv_component(&s->gb, h->current_mv_y);
    }else{
        h->current_mv_x = 0;
        h->current_mv_y = 0;
    }

    // Read cbp
    if ( HAS_CBP( h->mtype ) ){
        cbp = get_vlc2(&s->gb, h261_cbp_vlc.table, H261_CBP_VLC_BITS, 2) + 1;
    }

    if(s->mb_intra){
        s->current_picture.mb_type[xy]= MB_TYPE_INTRA;
        goto intra;
    }

    //set motion vectors
    s->mv_dir = MV_DIR_FORWARD;
    s->mv_type = MV_TYPE_16X16;
    s->current_picture.mb_type[xy]= MB_TYPE_16x16 | MB_TYPE_L0;
    s->mv[0][0][0] = h->current_mv_x * 2; //gets divided by 2 in motion compensation
    s->mv[0][0][1] = h->current_mv_y * 2;

intra:
    /* decode each block */
    if(s->mb_intra || HAS_CBP(h->mtype)){
        s->dsp.clear_blocks(s->block[0]);
        for (i = 0; i < 6; i++) {
            if (h261_decode_block(h, s->block[i], i, cbp&32) < 0){
                return SLICE_ERROR;
            }
            cbp+=cbp;
        }
    }else{
        for (i = 0; i < 6; i++)
            s->block_last_index[i]= -1;
    }

    MPV_decode_mb(s, s->block);

    return SLICE_OK;
}
/**
 * Decodes an 8x8 transform block.
 * @return <0 if an error occurred
 */
static int h261_decode_block(H261Context * h, DCTELEM * block,
                             int n, int coded)
{
    MpegEncContext * const s = &h->s;
    int code, level, i, j, run;
    RLTable *rl = &h261_rl_tcoeff;
    const uint8_t *scan_table;

    // For the variable length encoding there are two code tables, one being used for
    // the first transmitted LEVEL in INTER, INTER+MC and INTER+MC+FIL blocks, the second
    // for all other LEVELs except the first one in INTRA blocks which is fixed length
    // coded with 8 bits.
    // NOTE: the two code tables only differ in one VLC so we handle that manually.
    scan_table = s->intra_scantable.permutated;
    if (s->mb_intra){
        /* DC coef */
        level = get_bits(&s->gb, 8);
        // 0 (00000000b) and -128 (10000000b) are FORBIDDEN
        if((level&0x7F) == 0){
            av_log(s->avctx, AV_LOG_ERROR, "illegal dc %d at %d %d\n", level, s->mb_x, s->mb_y);
            return -1;
        }
        // The code 1000 0000 is not used, the reconstruction level of 1024 being coded as 1111 1111.
        if (level == 255)
            level = 128;
        block[0] = level;
        i = 1;
    }else if(coded){
        // Run  Level   Code
        // EOB          Not possible for first level when cbp is available (that's why the table is different)
        // 0    1       1s
        // *    *       0*
        int check = show_bits(&s->gb, 2);
        i = 0;
        if ( check & 0x2 ){
            skip_bits(&s->gb, 2);
            block[0] = ( check & 0x1 ) ? -1 : 1;
            i = 1;
        }
    }else{
        i = 0;
    }
    if(!coded){
        s->block_last_index[n] = i - 1;
        return 0;
    }
    for(;;){
        code = get_vlc2(&s->gb, rl->vlc.table, TCOEFF_VLC_BITS, 2);
        if (code < 0){
            av_log(s->avctx, AV_LOG_ERROR, "illegal ac vlc code at %dx%d\n", s->mb_x, s->mb_y);
            return -1;
        }
        if (code == rl->n) {
            /* escape */
            // The remaining combinations of (run, level) are encoded with a 20-bit word consisting of 6 bits escape, 6 bits run and 8 bits level.
            run = get_bits(&s->gb, 6);
            level = get_sbits(&s->gb, 8);
        }else if(code == 0){
            break;
        }else{
            run = rl->table_run[code];
            level = rl->table_level[code];
            if (get_bits1(&s->gb))
                level = -level;
        }
        i += run;
        if (i >= 64){
            av_log(s->avctx, AV_LOG_ERROR, "run overflow at %dx%d\n", s->mb_x, s->mb_y);
            return -1;
        }
        j = scan_table[i];
        block[j] = level;
        i++;
    }
    s->block_last_index[n] = i-1;
    return 0;
}
/**
 * Decodes the H.261 picture header.
 * @return <0 if no startcode found
 */
static int h261_decode_picture_header(H261Context *h){
    MpegEncContext * const s = &h->s;
    int format, i;
    uint32_t startcode= 0;

    for(i= s->gb.size_in_bits - get_bits_count(&s->gb); i>24; i-=1){
        startcode = ((startcode << 1) | get_bits(&s->gb, 1)) & 0x000FFFFF;

        if(startcode == 0x10)
            break;
    }

    if (startcode != 0x10){
        av_log(s->avctx, AV_LOG_ERROR, "Bad picture start code\n");
        return -1;
    }

    /* temporal reference */
    i= get_bits(&s->gb, 5); /* picture timestamp */
    if(i < (s->picture_number&31))
        i += 32;
    s->picture_number = (s->picture_number&~31) + i;

    s->avctx->time_base= (AVRational){1001, 30000};
    s->current_picture.pts= s->picture_number;

    /* PTYPE starts here */
    skip_bits1(&s->gb); /* split screen off */
    skip_bits1(&s->gb); /* camera off */
    skip_bits1(&s->gb); /* freeze picture release off */

    format = get_bits1(&s->gb);

    //only 2 formats possible
    if (format == 0){//QCIF
        s->width = 176;
        s->height = 144;
        s->mb_width = 11;
        s->mb_height = 9;
    }else{//CIF
        s->width = 352;
        s->height = 288;
        s->mb_width = 22;
        s->mb_height = 18;
    }

    s->mb_num = s->mb_width * s->mb_height;

    skip_bits1(&s->gb); /* still image mode off */
    skip_bits1(&s->gb); /* Reserved */

    /* PEI */
    while (get_bits1(&s->gb) != 0){
        skip_bits(&s->gb, 8);
    }

    // h261 has no I-FRAMES, but if we pass I_TYPE for the first frame, the codec crashes if it does
    // not contain all I-blocks (e.g. when a packet is lost)
    s->pict_type = P_TYPE;

    h->gob_number = 0;
    return 0;
}
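/**
 * Decodes all macroblocks of one group of blocks, filling in skipped
 * macroblocks between coded ones.
 * @return 0 once the GOB has been fully decoded, <0 on error
 */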
static int h261_decode_gob(H261Context *h){
    MpegEncContext * const s = &h->s;

    ff_set_qscale(s, s->qscale);

    /* decode mb's */
    while(h->current_mba <= MBA_STUFFING)
    {
        int ret;
        /* DCT & quantize */
        ret= h261_decode_mb(h);
        if(ret<0){
            if(ret==SLICE_END){
                h261_decode_mb_skipped(h, h->current_mba, 33);
                return 0;
            }
            av_log(s->avctx, AV_LOG_ERROR, "Error at MB: %d\n", s->mb_x + s->mb_y*s->mb_stride);
            return -1;
        }

        h261_decode_mb_skipped(h, h->current_mba-h->mba_diff, h->current_mba-1);
    }

    return -1;
}
#ifdef CONFIG_H261_PARSER
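/**
 * Scans the input buffer for the next picture start code and reports
 * where the current frame ends.
 * @return the end offset of the current frame, or END_NOT_FOUND
 */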
static int h261_find_frame_end(ParseContext *pc, AVCodecContext* avctx, const uint8_t *buf, int buf_size){
    int vop_found, i, j;
    uint32_t state;

    vop_found= pc->frame_start_found;
    state= pc->state;

    for(i=0; i<buf_size && !vop_found; i++){
        state= (state<<8) | buf[i];
        for(j=0; j<8; j++){
            if(((state>>j)&0xFFFFF) == 0x00010){
                vop_found=1;
                break;
            }
        }
    }
    if(vop_found){
        for(; i<buf_size; i++){
            state= (state<<8) | buf[i];
            for(j=0; j<8; j++){
                if(((state>>j)&0xFFFFF) == 0x00010){
                    pc->frame_start_found=0;
                    pc->state= state>>(2*8);
                    return i-1;
                }
            }
        }
    }

    pc->frame_start_found= vop_found;
    pc->state= state;
    return END_NOT_FOUND;
}
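/**
 * Parser callback: combines input chunks into complete H.261 frames using
 * h261_find_frame_end() and ff_combine_frame().
 */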
static int h261_parse(AVCodecParserContext *s,
                      AVCodecContext *avctx,
                      uint8_t **poutbuf, int *poutbuf_size,
                      const uint8_t *buf, int buf_size)
{
    ParseContext *pc = s->priv_data;
    int next;

    next= h261_find_frame_end(pc,avctx, buf, buf_size);
    if (ff_combine_frame(pc, next, (uint8_t **)&buf, &buf_size) < 0) {
        *poutbuf = NULL;
        *poutbuf_size = 0;
        return buf_size;
    }
    *poutbuf = (uint8_t *)buf;
    *poutbuf_size = buf_size;
    return next;
}
#endif
/**
 * Returns the number of bytes consumed for building the current frame.
 */
static int get_consumed_bytes(MpegEncContext *s, int buf_size){
    int pos= get_bits_count(&s->gb)>>3;
    if(pos==0) pos=1; //avoid infinite loops (I doubt that's needed but ...)
    if(pos+10>buf_size) pos=buf_size; // oops ;)
    return pos;
}
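/**
 * Decodes one complete H.261 frame from the input buffer: reads the
 * picture header, resynchronizes on each GOB and decodes it, then hands
 * the finished picture back to the caller.
 */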
static int h261_decode_frame(AVCodecContext *avctx,
                             void *data, int *data_size,
                             uint8_t *buf, int buf_size)
{
    H261Context *h= avctx->priv_data;
    MpegEncContext *s = &h->s;
    int ret;
    AVFrame *pict = data;

#ifdef DEBUG
    av_log(avctx, AV_LOG_DEBUG, "*****frame %d size=%d\n", avctx->frame_number, buf_size);
    av_log(avctx, AV_LOG_DEBUG, "bytes=%x %x %x %x\n", buf[0], buf[1], buf[2], buf[3]);
#endif
    s->flags= avctx->flags;
    s->flags2= avctx->flags2;

    h->gob_start_code_skipped=0;

retry:

    init_get_bits(&s->gb, buf, buf_size*8);

    if(!s->context_initialized){
        if (MPV_common_init(s) < 0) //we need the idct permutation for reading a custom matrix
            return -1;
    }

    //we need to set current_picture_ptr before reading the header, otherwise we can't store anything in there
    if(s->current_picture_ptr==NULL || s->current_picture_ptr->data[0]){
        int i= ff_find_unused_picture(s, 0);
        s->current_picture_ptr= &s->picture[i];
    }

    ret = h261_decode_picture_header(h);

    /* skip if the header was damaged */
    if (ret < 0){
        av_log(s->avctx, AV_LOG_ERROR, "header damaged\n");
        return -1;
    }

    if (s->width != avctx->coded_width || s->height != avctx->coded_height){
        ParseContext pc= s->parse_context; //FIXME move this demuxing hack to avformat
        s->parse_context.buffer=0;
        MPV_common_end(s);
        s->parse_context= pc;
    }
    if (!s->context_initialized) {
        avcodec_set_dimensions(avctx, s->width, s->height);

        goto retry;
    }

    // for hurry_up==5
    s->current_picture.pict_type= s->pict_type;
    s->current_picture.key_frame= s->pict_type == I_TYPE;

    /* skip everything if we are in a hurry>=5 */
    if(avctx->hurry_up>=5) return get_consumed_bytes(s, buf_size);
    if(  (avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type==B_TYPE)
       ||(avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type!=I_TYPE)
       || avctx->skip_frame >= AVDISCARD_ALL)
        return get_consumed_bytes(s, buf_size);

    if(MPV_frame_start(s, avctx) < 0)
        return -1;

    ff_er_frame_start(s);

    /* decode each macroblock */
    s->mb_x=0;
    s->mb_y=0;

    while(h->gob_number < (s->mb_height==18 ? 12 : 5)){
        if(ff_h261_resync(h)<0)
            break;
        h261_decode_gob(h);
    }
    MPV_frame_end(s);

    assert(s->current_picture.pict_type == s->current_picture_ptr->pict_type);
    assert(s->current_picture.pict_type == s->pict_type);
    *pict= *(AVFrame*)s->current_picture_ptr;
    ff_print_debug_info(s, pict);

    *data_size = sizeof(AVFrame);

    return get_consumed_bytes(s, buf_size);
}
static int h261_decode_end(AVCodecContext *avctx)
{
    H261Context *h= avctx->priv_data;
    MpegEncContext *s = &h->s;

    MPV_common_end(s);
    return 0;
}
#ifdef CONFIG_ENCODERS
AVCodec h261_encoder = {
    "h261",
    CODEC_TYPE_VIDEO,
    CODEC_ID_H261,
    sizeof(H261Context),
    MPV_encode_init,
    MPV_encode_picture,
    MPV_encode_end,
    .pix_fmts= (enum PixelFormat[]){PIX_FMT_YUV420P, -1},
};
#endif

AVCodec h261_decoder = {
    "h261",
    CODEC_TYPE_VIDEO,
    CODEC_ID_H261,
    sizeof(H261Context),
    h261_decode_init,
    NULL,
    h261_decode_end,
    h261_decode_frame,
    CODEC_CAP_DR1,
};

#ifdef CONFIG_H261_PARSER
AVCodecParser h261_parser = {
    { CODEC_ID_H261 },
    sizeof(ParseContext),
    NULL,
    h261_parse,
    ff_parse_close,
};
#endif