Originally committed as revision 4153 to svn://svn.ffmpeg.org/ffmpeg/trunk
@@ -564,7 +564,7 @@ static int decode_i_frame(FourXContext *f, uint8_t *buf, int length){
     if(prestream_size + bitstream_size + 12 != length
        || bitstream_size > (1<<26)
        || prestream_size > (1<<26)){
-        av_log(f->avctx, AV_LOG_ERROR, "size missmatch %d %d %d\n", prestream_size, bitstream_size, length);
+        av_log(f->avctx, AV_LOG_ERROR, "size mismatch %d %d %d\n", prestream_size, bitstream_size, length);
         return -1;
     }
@@ -591,7 +591,7 @@ static int decode_i_frame(FourXContext *f, uint8_t *buf, int length){
     }
     if(get_vlc2(&f->pre_gb, f->pre_vlc.table, ACDC_VLC_BITS, 3) != 256)
-        av_log(f->avctx, AV_LOG_ERROR, "end missmatch\n");
+        av_log(f->avctx, AV_LOG_ERROR, "end mismatch\n");
     return 0;
 }
@@ -607,7 +607,7 @@ static int decode_frame(AVCodecContext *avctx,
     frame_4cc= get32(buf);
     if(buf_size != get32(buf+4)+8){
-        av_log(f->avctx, AV_LOG_ERROR, "size missmatch %d %d\n", buf_size, get32(buf+4));
+        av_log(f->avctx, AV_LOG_ERROR, "size mismatch %d %d\n", buf_size, get32(buf+4));
     }
     if(frame_4cc == ff_get_fourcc("cfrm")){
@@ -643,7 +643,7 @@ static int decode_frame(AVCodecContext *avctx,
         frame_size= cfrm->size;
         if(id != avctx->frame_number){
-            av_log(f->avctx, AV_LOG_ERROR, "cframe id missmatch %d %d\n", id, avctx->frame_number);
+            av_log(f->avctx, AV_LOG_ERROR, "cframe id mismatch %d %d\n", id, avctx->frame_number);
         }
         cfrm->size= cfrm->id= 0;
@@ -1691,14 +1691,14 @@ typedef struct AVCodecContext {
     int nsse_weight;
     /**
-     * number of macroblock rows at the top which are skiped.
+     * number of macroblock rows at the top which are skipped.
      * - encoding: unused
      * - decoding: set by user
      */
     int skip_top;
     /**
-     * number of macroblock rows at the bottom which are skiped.
+     * number of macroblock rows at the bottom which are skipped.
      * - encoding: unused
      * - decoding: set by user
      */
@@ -346,7 +346,7 @@ static void guess_mv(MpegEncContext *s){
     s->mv_dir = MV_DIR_FORWARD;
     s->mb_intra=0;
     s->mv_type = MV_TYPE_16X16;
-    s->mb_skiped=0;
+    s->mb_skipped=0;
     s->dsp.clear_blocks(s->block[0]);
@@ -474,7 +474,7 @@ int score_sum=0;
     s->mv_dir = MV_DIR_FORWARD;
     s->mb_intra=0;
     s->mv_type = MV_TYPE_16X16;
-    s->mb_skiped=0;
+    s->mb_skipped=0;
     s->dsp.clear_blocks(s->block[0]);
@@ -858,7 +858,7 @@ void ff_er_frame_end(MpegEncContext *s){
     s->mv_dir = MV_DIR_FORWARD;
     s->mb_intra=0;
-    s->mb_skiped=0;
+    s->mb_skipped=0;
     if(IS_8X8(mb_type)){
         int mb_index= mb_x*2 + mb_y*2*s->b8_stride;
         int j;
@@ -897,7 +897,7 @@ void ff_er_frame_end(MpegEncContext *s){
     s->mv_dir = MV_DIR_FORWARD|MV_DIR_BACKWARD;
     s->mb_intra=0;
     s->mv_type = MV_TYPE_16X16;
-    s->mb_skiped=0;
+    s->mb_skipped=0;
     if(s->pp_time){
         int time_pp= s->pp_time;
@@ -158,7 +158,7 @@ static int faac_decode_frame(AVCodecContext *avctx,
     out = s->faacDecDecode(s->faac_handle, &frame_info, (unsigned char*)buf, (unsigned long)buf_size);
     if (frame_info.error > 0) {
-        av_log(avctx, AV_LOG_ERROR, "faac: frame decodinf failed: %s\n",
+        av_log(avctx, AV_LOG_ERROR, "faac: frame decoding failed: %s\n",
                s->faacDecGetErrorMessage(frame_info.error));
         return 0;
     }
@@ -551,7 +551,7 @@ static int encode_init(AVCodecContext *avctx)
     int i;
     if(avctx->strict_std_compliance >= 0){
-        av_log(avctx, AV_LOG_ERROR, "this codec is under development, files encoded with it wont be decodeable with future versions!!!\n"
+        av_log(avctx, AV_LOG_ERROR, "this codec is under development, files encoded with it may not be decodeable with future versions!!!\n"
               "use vstrict=-1 / -strict -1 to use it anyway\n");
         return -1;
     }
@@ -556,7 +556,7 @@ static int decode_frame(FLACContext *s)
     skip_bits(&s->gb, 8);
     crc8= get_crc8(s->gb.buffer, get_bits_count(&s->gb)/8);
     if(crc8){
-        av_log(s->avctx, AV_LOG_ERROR, "header crc missmatch crc=%2X\n", crc8);
+        av_log(s->avctx, AV_LOG_ERROR, "header crc mismatch crc=%2X\n", crc8);
         return -1;
     }
@@ -541,7 +541,7 @@ static int h261_decode_mb_skipped(H261Context *h, int mba1, int mba2 )
     s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
     s->mv[0][0][0] = 0;
     s->mv[0][0][1] = 0;
-    s->mb_skiped = 1;
+    s->mb_skipped = 1;
     h->mtype &= ~MB_TYPE_H261_FIL;
     MPV_decode_mb(s, s->block);
@@ -619,7 +619,7 @@ void ff_h263_update_motion_val(MpegEncContext * s){
     const int wrap = s->b8_stride;
     const int xy = s->block_index[0];
-    s->current_picture.mbskip_table[mb_xy]= s->mb_skiped;
+    s->current_picture.mbskip_table[mb_xy]= s->mb_skipped;
     if(s->mv_type != MV_TYPE_8X8){
         int motion_x, motion_y;
@@ -855,7 +855,7 @@ void mpeg4_encode_mb(MpegEncContext * s,
     assert((s->dquant&1)==0);
     assert(mb_type>=0);
-    /* nothing to do if this MB was skiped in the next P Frame */
+    /* nothing to do if this MB was skipped in the next P Frame */
     if(s->next_picture.mbskip_table[s->mb_y * s->mb_stride + s->mb_x]){ //FIXME avoid DCT & ...
         s->skip_count++;
         s->mv[0][0][0]=
@@ -864,7 +864,7 @@ void mpeg4_encode_mb(MpegEncContext * s,
         s->mv[1][0][1]= 0;
         s->mv_dir= MV_DIR_FORWARD; //doesnt matter
         s->qscale -= s->dquant;
-//        s->mb_skiped=1;
+//        s->mb_skipped=1;
         return;
     }
@@ -990,7 +990,7 @@ void mpeg4_encode_mb(MpegEncContext * s,
     offset= x + y*s->linesize;
     p_pic= s->new_picture.data[0] + offset;
-    s->mb_skiped=1;
+    s->mb_skipped=1;
     for(i=0; i<s->max_b_frames; i++){
         uint8_t *b_pic;
         int diff;
@@ -1001,14 +1001,14 @@ void mpeg4_encode_mb(MpegEncContext * s,
         b_pic= pic->data[0] + offset + 16; //FIXME +16
         diff= s->dsp.sad[0](NULL, p_pic, b_pic, s->linesize, 16);
         if(diff>s->qscale*70){ //FIXME check that 70 is optimal
-            s->mb_skiped=0;
+            s->mb_skipped=0;
             break;
         }
     }
     }else
-        s->mb_skiped=1;
+        s->mb_skipped=1;
-    if(s->mb_skiped==1){
+    if(s->mb_skipped==1){
         /* skip macroblock */
         put_bits(&s->pb, 1, 1);
@@ -3211,7 +3211,7 @@ static int mpeg4_decode_video_packet_header(MpegEncContext *s)
     }
     if(s->pict_type == B_TYPE){
         while(s->next_picture.mbskip_table[ s->mb_index2xy[ mb_num ] ]) mb_num++;
-        if(mb_num >= s->mb_num) return -1; // slice contains just skiped MBs which where allready decoded
+        if(mb_num >= s->mb_num) return -1; // slice contains just skipped MBs which where allready decoded
     }
     s->mb_x= mb_num % s->mb_width;
@@ -3729,10 +3729,10 @@ static int mpeg4_decode_partitioned_mb(MpegEncContext *s, DCTELEM block[6][64])
         s->mv_type = MV_TYPE_16X16;
         if(s->pict_type==S_TYPE && s->vol_sprite_usage==GMC_SPRITE){
             s->mcsel=1;
-            s->mb_skiped = 0;
+            s->mb_skipped = 0;
         }else{
             s->mcsel=0;
-            s->mb_skiped = 1;
+            s->mb_skipped = 1;
         }
     }else if(s->mb_intra){
         s->ac_pred = IS_ACPRED(s->current_picture.mb_type[xy]);
@@ -3910,7 +3910,7 @@ int ff_h263_decode_mb(MpegEncContext *s,
         s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
         s->mv[0][0][0] = 0;
         s->mv[0][0][1] = 0;
-        s->mb_skiped = !(s->obmc | s->loop_filter);
+        s->mb_skipped = !(s->obmc | s->loop_filter);
         goto end;
     }
     cbpc = get_vlc2(&s->gb, inter_MCBPC_vlc.table, INTER_MCBPC_VLC_BITS, 2);
@@ -4175,13 +4175,13 @@ int ff_mpeg4_decode_mb(MpegEncContext *s,
             s->mv[0][0][0]= get_amv(s, 0);
             s->mv[0][0][1]= get_amv(s, 1);
-            s->mb_skiped = 0;
+            s->mb_skipped = 0;
         }else{
             s->current_picture.mb_type[xy]= MB_TYPE_SKIP | MB_TYPE_16x16 | MB_TYPE_L0;
             s->mcsel=0;
             s->mv[0][0][0] = 0;
             s->mv[0][0][1] = 0;
-            s->mb_skiped = 1;
+            s->mb_skipped = 1;
         }
         goto end;
     }
@@ -4294,9 +4294,9 @@ int ff_mpeg4_decode_mb(MpegEncContext *s,
         }
         /* if we skipped it in the future P Frame than skip it now too */
-        s->mb_skiped= s->next_picture.mbskip_table[s->mb_y * s->mb_stride + s->mb_x]; // Note, skiptab=0 if last was GMC
+        s->mb_skipped= s->next_picture.mbskip_table[s->mb_y * s->mb_stride + s->mb_x]; // Note, skiptab=0 if last was GMC
-        if(s->mb_skiped){
+        if(s->mb_skipped){
             /* skip mb */
             for(i=0;i<6;i++)
                 s->block_last_index[i] = -1;
@@ -5586,7 +5586,7 @@ static int decode_vol_header(MpegEncContext *s, GetBitContext *gb){
         if (get_bits1(gb) == 1) { /* not_8_bit */
             s->quant_precision = get_bits(gb, 4); /* quant_precision */
             if(get_bits(gb, 4)!=8) av_log(s->avctx, AV_LOG_ERROR, "N-bit not supported\n"); /* bits_per_pixel */
-            if(s->quant_precision!=5) av_log(s->avctx, AV_LOG_ERROR, "quant precission %d\n", s->quant_precision);
+            if(s->quant_precision!=5) av_log(s->avctx, AV_LOG_ERROR, "quant precision %d\n", s->quant_precision);
         } else {
             s->quant_precision = 5;
         }
@@ -5778,7 +5778,7 @@ static int decode_vop_header(MpegEncContext *s, GetBitContext *gb){
     s->pict_type = get_bits(gb, 2) + I_TYPE; /* pict type: I = 0 , P = 1 */
     if(s->pict_type==B_TYPE && s->low_delay && s->vol_control_parameters==0 && !(s->flags & CODEC_FLAG_LOW_DELAY)){
-        av_log(s->avctx, AV_LOG_ERROR, "low_delay flag set, but shouldnt, clearing it\n");
+        av_log(s->avctx, AV_LOG_ERROR, "low_delay flag incorrectly, clearing it\n");
         s->low_delay=0;
     }
@@ -5799,7 +5799,7 @@ static int decode_vop_header(MpegEncContext *s, GetBitContext *gb){
     check_marker(gb, "before time_increment");
     if(s->time_increment_bits==0){
-        av_log(s->avctx, AV_LOG_ERROR, "hmm, seems the headers arnt complete, trying to guess time_increment_bits\n");
+        av_log(s->avctx, AV_LOG_ERROR, "hmm, seems the headers are not complete, trying to guess time_increment_bits\n");
         for(s->time_increment_bits=1 ;s->time_increment_bits<16; s->time_increment_bits++){
             if(show_bits(gb, s->time_increment_bits+1)&1) break;
@@ -5830,8 +5830,8 @@ static int decode_vop_header(MpegEncContext *s, GetBitContext *gb){
         s->time= (s->last_time_base + time_incr)*s->time_increment_resolution + time_increment;
         s->pb_time= s->pp_time - (s->last_non_b_time - s->time);
         if(s->pp_time <=s->pb_time || s->pp_time <= s->pp_time - s->pb_time || s->pp_time<=0){
-//            printf("messed up order, seeking?, skiping current b frame\n");
-            return FRAME_SKIPED;
+//            printf("messed up order, maybe after seeking? skipping current b frame\n");
+            return FRAME_SKIPPED;
         }
         if(s->t_frame==0) s->t_frame= s->pb_time;
@@ -5853,7 +5853,7 @@ static int decode_vop_header(MpegEncContext *s, GetBitContext *gb){
     if (get_bits1(gb) != 1){
         if(s->avctx->debug&FF_DEBUG_PICT_INFO)
             av_log(s->avctx, AV_LOG_ERROR, "vop not coded\n");
-        return FRAME_SKIPED;
+        return FRAME_SKIPPED;
     }
 //printf("time %d %d %d || %Ld %Ld %Ld\n", s->time_increment_bits, s->time_increment_resolution, s->time_base,
 //s->time, s->last_non_b_time, s->last_non_b_time - s->pp_time);
@@ -6003,7 +6003,7 @@ int ff_mpeg4_decode_picture_header(MpegEncContext * s, GetBitContext *gb)
     if(get_bits_count(gb) >= gb->size_in_bits){
         if(gb->size_in_bits==8 && (s->divx_version || s->xvid_build)){
             av_log(s->avctx, AV_LOG_ERROR, "frame skip %d\n", gb->size_in_bits);
-            return FRAME_SKIPED; //divx bug
+            return FRAME_SKIPPED; //divx bug
         }else
             return -1; //end of stream
     }
@@ -6032,11 +6032,11 @@ int ff_mpeg4_decode_picture_header(MpegEncContext * s, GetBitContext *gb)
         else if(startcode==0x1BB) av_log(s->avctx, AV_LOG_DEBUG, "FBA Object Plane start");
         else if(startcode==0x1BC) av_log(s->avctx, AV_LOG_DEBUG, "Mesh Object start");
         else if(startcode==0x1BD) av_log(s->avctx, AV_LOG_DEBUG, "Mesh Object Plane start");
-        else if(startcode==0x1BE) av_log(s->avctx, AV_LOG_DEBUG, "Still Textutre Object start");
-        else if(startcode==0x1BF) av_log(s->avctx, AV_LOG_DEBUG, "Textutre Spatial Layer start");
-        else if(startcode==0x1C0) av_log(s->avctx, AV_LOG_DEBUG, "Textutre SNR Layer start");
-        else if(startcode==0x1C1) av_log(s->avctx, AV_LOG_DEBUG, "Textutre Tile start");
-        else if(startcode==0x1C2) av_log(s->avctx, AV_LOG_DEBUG, "Textutre Shape Layer start");
+        else if(startcode==0x1BE) av_log(s->avctx, AV_LOG_DEBUG, "Still Texture Object start");
+        else if(startcode==0x1BF) av_log(s->avctx, AV_LOG_DEBUG, "Texture Spatial Layer start");
+        else if(startcode==0x1C0) av_log(s->avctx, AV_LOG_DEBUG, "Texture SNR Layer start");
+        else if(startcode==0x1C1) av_log(s->avctx, AV_LOG_DEBUG, "Texture Tile start");
+        else if(startcode==0x1C2) av_log(s->avctx, AV_LOG_DEBUG, "Texture Shape Layer start");
         else if(startcode==0x1C3) av_log(s->avctx, AV_LOG_DEBUG, "stuffing start");
         else if(startcode<=0x1C5) av_log(s->avctx, AV_LOG_DEBUG, "reserved");
         else if(startcode<=0x1FF) av_log(s->avctx, AV_LOG_DEBUG, "System start");
@@ -199,7 +199,7 @@ static int decode_slice(MpegEncContext *s){
         s->mv_dir = MV_DIR_FORWARD;
         s->mv_type = MV_TYPE_16X16;
-//        s->mb_skiped = 0;
+//        s->mb_skipped = 0;
 //printf("%d %d %06X\n", ret, get_bits_count(&s->gb), show_bits(&s->gb, 24));
         ret= s->decode_mb(s, s->block);
@@ -451,7 +451,7 @@ uint64_t time= rdtsc();
     }else if(s->codec_id==CODEC_ID_H263){
         next= h263_find_frame_end(&s->parse_context, buf, buf_size);
     }else{
-        av_log(s->avctx, AV_LOG_ERROR, "this codec doesnt support truncated bitstreams\n");
+        av_log(s->avctx, AV_LOG_ERROR, "this codec does not support truncated bitstreams\n");
         return -1;
     }
@@ -503,7 +503,7 @@ retry:
         ret = h263_decode_picture_header(s);
     }
-    if(ret==FRAME_SKIPED) return get_consumed_bytes(s, buf_size);
+    if(ret==FRAME_SKIPPED) return get_consumed_bytes(s, buf_size);
     /* skip if the header was thrashed */
     if (ret < 0){
@@ -162,7 +162,7 @@ typedef struct H264Context{
     int chroma_qp; //QPc
-    int prev_mb_skiped; //FIXME remove (IMHO not used)
+    int prev_mb_skipped; //FIXME remove (IMHO not used)
     //prediction stuff
     int chroma_pred_mode;
@@ -4205,7 +4205,7 @@ static void decode_mb_skip(H264Context *h){
     s->current_picture.mb_type[mb_xy]= mb_type|MB_TYPE_SKIP;
     s->current_picture.qscale_table[mb_xy]= s->qscale;
     h->slice_table[ mb_xy ]= h->slice_num;
-    h->prev_mb_skiped= 1;
+    h->prev_mb_skipped= 1;
 }
 /**
@@ -4232,12 +4232,12 @@ static int decode_mb_cavlc(H264Context *h){
         }
     }
     if(h->mb_aff_frame){
-        if ( ((s->mb_y&1) == 0) || h->prev_mb_skiped)
+        if ( ((s->mb_y&1) == 0) || h->prev_mb_skipped)
             h->mb_field_decoding_flag = get_bits1(&s->gb);
     }else
         h->mb_field_decoding_flag= (s->picture_structure!=PICT_FRAME);
-    h->prev_mb_skiped= 0;
+    h->prev_mb_skipped= 0;
     mb_type= get_ue_golomb(&s->gb);
     if(h->slice_type == B_TYPE){
@@ -5209,12 +5209,12 @@ static int decode_mb_cabac(H264Context *h) {
         }
     }
     if(h->mb_aff_frame){
-        if ( ((s->mb_y&1) == 0) || h->prev_mb_skiped)
+        if ( ((s->mb_y&1) == 0) || h->prev_mb_skipped)
            h->mb_field_decoding_flag = decode_cabac_field_decoding_flag(h);
     }else
         h->mb_field_decoding_flag= (s->picture_structure!=PICT_FRAME);
-    h->prev_mb_skiped = 0;
+    h->prev_mb_skipped = 0;
     compute_mb_neighboors(h);
     if( ( mb_type = decode_cabac_mb_type( h ) ) < 0 ) {
@@ -6628,7 +6628,7 @@ static inline int decode_seq_parameter_set(H264Context *h){
         sps->crop_top = get_ue_golomb(&s->gb);
         sps->crop_bottom= get_ue_golomb(&s->gb);
         if(sps->crop_left || sps->crop_top){
-            av_log(h->s.avctx, AV_LOG_ERROR, "insane cropping not completly supported, this could look slightly wrong ...\n");
+            av_log(h->s.avctx, AV_LOG_ERROR, "insane cropping not completely supported, this could look slightly wrong ...\n");
         }
     }else{
         sps->crop_left =
@@ -7022,7 +7022,7 @@ static int decode_frame(AVCodecContext *avctx,
     //FIXME do something with unavailable reference frames
-//    if(ret==FRAME_SKIPED) return get_consumed_bytes(s, buf_index, buf_size);
+//    if(ret==FRAME_SKIPPED) return get_consumed_bytes(s, buf_index, buf_size);
     if(!s->current_picture_ptr){
         av_log(h->s.avctx, AV_LOG_DEBUG, "error, NO frame\n");
         return -1;
@@ -542,7 +542,7 @@ static int encode_init(AVCodecContext *avctx)
         if(s->interlaced != ( s->height > 288 ))
             av_log(avctx, AV_LOG_INFO, "using huffyuv 2.2.0 or newer interlacing flag\n");
     }else if(avctx->strict_std_compliance>=0){
-        av_log(avctx, AV_LOG_ERROR, "This codec is under development; files encoded with it may not be decodeable with future versions!!! Set vstrict=-1 / -strict -1 to use it anyway.\n");
+        av_log(avctx, AV_LOG_ERROR, "This codec is under development; files encoded with it may not be decodable with future versions!!! Set vstrict=-1 / -strict -1 to use it anyway.\n");
         return -1;
     }
@@ -827,7 +827,7 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, uint8
         p->data[0][1]= get_bits(&s->gb, 8);
         p->data[0][0]= get_bits(&s->gb, 8);
-        av_log(avctx, AV_LOG_ERROR, "YUY2 output isnt implemenetd yet\n");
+        av_log(avctx, AV_LOG_ERROR, "YUY2 output is not implemented yet\n");
         return -1;
     }else{
@@ -996,14 +996,14 @@ static int decode_frame(AVCodecContext *avctx, void *data, int *data_size, uint8
                 }
             }
         }
-        draw_slice(s, height); // just 1 large slice as this isnt possible in reverse order
+        draw_slice(s, height); // just 1 large slice as this is not possible in reverse order
         break;
     default:
         av_log(avctx, AV_LOG_ERROR, "prediction type not supported!\n");
     }
     }else{
-        av_log(avctx, AV_LOG_ERROR, "BGR24 output isnt implemenetd yet\n");
+        av_log(avctx, AV_LOG_ERROR, "BGR24 output is not implemented yet\n");
         return -1;
     }
 }
@@ -714,7 +714,7 @@ static int decode_init(AVCodecContext *avctx)
         break;
     default:
         if ((c->compression < Z_NO_COMPRESSION) || (c->compression > Z_BEST_COMPRESSION)) {
-            av_log(avctx, AV_LOG_ERROR, "Unusupported compression level for ZLIB: (%d).\n", c->compression);
+            av_log(avctx, AV_LOG_ERROR, "Unsupported compression level for ZLIB: (%d).\n", c->compression);
             return 1;
         }
         av_log(avctx, AV_LOG_INFO, "Compression level for ZLIB: (%d).\n", c->compression);
@@ -1300,7 +1300,7 @@ void ff_estimate_p_frame_motion(MpegEncContext * s,
         c->sub_motion_search(s, &mx, &my, dmin, 0, 0, 0, 16);
         if(s->flags&CODEC_FLAG_MV0)
             if(mx || my)
-                mb_type |= CANDIDATE_MB_TYPE_SKIPED; //FIXME check difference
+                mb_type |= CANDIDATE_MB_TYPE_SKIPPED; //FIXME check difference
     }else{
         mx <<=shift;
         my <<=shift;
@@ -226,10 +226,10 @@ static int encode_init(AVCodecContext *avctx)
     if(find_frame_rate_index(s) < 0){
         if(s->strict_std_compliance >=0){
-            av_log(avctx, AV_LOG_ERROR, "MPEG1/2 doesnt support %d/%d fps\n", avctx->frame_rate, avctx->frame_rate_base);
+            av_log(avctx, AV_LOG_ERROR, "MPEG1/2 does not support %d/%d fps\n", avctx->frame_rate, avctx->frame_rate_base);
             return -1;
         }else{
-            av_log(avctx, AV_LOG_INFO, "MPEG1/2 doesnt support %d/%d fps, there may be AV sync issues\n", avctx->frame_rate, avctx->frame_rate_base);
+            av_log(avctx, AV_LOG_INFO, "MPEG1/2 does not support %d/%d fps, there may be AV sync issues\n", avctx->frame_rate, avctx->frame_rate_base);
         }
     }
@@ -1055,11 +1055,11 @@ static int mpeg_decode_mb(MpegEncContext *s,
     dprintf("decode_mb: x=%d y=%d\n", s->mb_x, s->mb_y);
-    assert(s->mb_skiped==0);
+    assert(s->mb_skipped==0);
     if (s->mb_skip_run-- != 0) {
         if(s->pict_type == I_TYPE){
-            av_log(s->avctx, AV_LOG_ERROR, "skiped MB in I frame at %d %d\n", s->mb_x, s->mb_y);
+            av_log(s->avctx, AV_LOG_ERROR, "skipped MB in I frame at %d %d\n", s->mb_x, s->mb_y);
             return -1;
         }
@@ -1078,7 +1078,7 @@ static int mpeg_decode_mb(MpegEncContext *s,
             s->last_mv[0][0][0] = s->last_mv[0][0][1] = 0;
             s->last_mv[0][1][0] = s->last_mv[0][1][1] = 0;
             s->field_select[0][0]= s->picture_structure - 1;
-            s->mb_skiped = 1;
+            s->mb_skipped = 1;
             s->current_picture.mb_type[ s->mb_x + s->mb_y*s->mb_stride ]= MB_TYPE_SKIP | MB_TYPE_L0 | MB_TYPE_16x16;
         } else {
             int mb_type;
@@ -1101,7 +1101,7 @@ static int mpeg_decode_mb(MpegEncContext *s,
-//            assert(s->current_picture.mb_type[ s->mb_x + s->mb_y*s->mb_stride - 1]&(MB_TYPE_16x16|MB_TYPE_16x8));
+//            assert(s->current_picture.mb_type[ s->mb_x + s->mb_y*s->mb_stride - 1]&(MB_TYPE_16x16|MB_TYPE_16x8));
             if((s->mv[0][0][0]|s->mv[0][0][1]|s->mv[1][0][0]|s->mv[1][0][1])==0)
-                s->mb_skiped = 1;
+                s->mb_skipped = 1;
         }
         return 0;
@@ -409,7 +409,7 @@ static int alloc_picture(MpegEncContext *s, Picture *pic, int shared){
         memmove(s->prev_pict_types+1, s->prev_pict_types, PREV_PICT_TYPES_BUFFER_SIZE-1);
         s->prev_pict_types[0]= s->pict_type;
         if(pic->age < PREV_PICT_TYPES_BUFFER_SIZE && s->prev_pict_types[pic->age] == B_TYPE)
-            pic->age= INT_MAX; // skiped MBs in b frames are quite rare in mpeg1/2 and its a bit tricky to skip them anyway
+            pic->age= INT_MAX; // skipped MBs in b frames are quite rare in mpeg1/2 and its a bit tricky to skip them anyway
     return 0;
 fail: //for the CHECKED_ALLOCZ macro
@@ -1444,7 +1444,7 @@ int MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
 {
     int i;
     AVFrame *pic;
-    s->mb_skiped = 0;
+    s->mb_skipped = 0;
     assert(s->last_picture_ptr==NULL || s->out_format != FMT_H264 || s->codec_id == CODEC_ID_SVQ3);
@@ -3227,7 +3227,7 @@ static inline void MPV_motion(MpegEncContext *s,
         const int mot_stride= s->b8_stride;
         const int mot_xy= mb_x*2 + mb_y*2*mot_stride;
-        assert(!s->mb_skiped);
+        assert(!s->mb_skipped);
         memcpy(mv_cache[1][1], s->current_picture.motion_val[0][mot_xy ], sizeof(int16_t)*4);
         memcpy(mv_cache[2][1], s->current_picture.motion_val[0][mot_xy+mot_stride], sizeof(int16_t)*4);
@@ -3699,11 +3699,11 @@ static always_inline void MPV_decode_mb_internal(MpegEncContext *s, DCTELEM bloc
         assert(age);
-        if (s->mb_skiped) {
-            s->mb_skiped= 0;
+        if (s->mb_skipped) {
+            s->mb_skipped= 0;
             assert(s->pict_type!=I_TYPE);
-            (*mbskip_ptr) ++; /* indicate that this time we skiped it */
+            (*mbskip_ptr) ++; /* indicate that this time we skipped it */
             if(*mbskip_ptr >99) *mbskip_ptr= 99;
             /* if previous was skipped too, then nothing to do ! */
@@ -4374,7 +4374,7 @@ static inline void copy_context_before_encode(MpegEncContext *d, MpegEncContext
     d->misc_bits= s->misc_bits;
     d->last_bits= 0;
-    d->mb_skiped= 0;
+    d->mb_skipped= 0;
     d->qscale= s->qscale;
     d->dquant= s->dquant;
 }
@@ -4401,7 +4401,7 @@ static inline void copy_context_after_encode(MpegEncContext *d, MpegEncContext *
     d->misc_bits= s->misc_bits;
     d->mb_intra= s->mb_intra;
-    d->mb_skiped= s->mb_skiped;
+    d->mb_skipped= s->mb_skipped;
     d->mv_type= s->mv_type;
     d->mv_dir= s->mv_dir;
     d->pb= s->pb;
@@ -4773,7 +4773,7 @@ static int encode_thread(AVCodecContext *c, void *arg){
                 s->first_slice_line=0;
             }
-            s->mb_skiped=0;
+            s->mb_skipped=0;
             s->dquant=0; //only for QP_RD
             if(mb_type & (mb_type-1) || (s->flags & CODEC_FLAG_QP_RD)){ // more than 1 MB type possible or CODEC_FLAG_QP_RD
@@ -4810,13 +4810,13 @@ static int encode_thread(AVCodecContext *c, void *arg){
                     encode_mb_hq(s, &backup_s, &best_s, CANDIDATE_MB_TYPE_INTER_I, pb, pb2, tex_pb,
                                  &dmin, &next_block, 0, 0);
                 }
-                if(mb_type&CANDIDATE_MB_TYPE_SKIPED){
+                if(mb_type&CANDIDATE_MB_TYPE_SKIPPED){
                     s->mv_dir = MV_DIR_FORWARD;
                     s->mv_type = MV_TYPE_16X16;
                     s->mb_intra= 0;
                     s->mv[0][0][0] = 0;
                     s->mv[0][0][1] = 0;
-                    encode_mb_hq(s, &backup_s, &best_s, CANDIDATE_MB_TYPE_SKIPED, pb, pb2, tex_pb,
+                    encode_mb_hq(s, &backup_s, &best_s, CANDIDATE_MB_TYPE_SKIPPED, pb, pb2, tex_pb,
                                  &dmin, &next_block, s->mv[0][0][0], s->mv[0][0][1]);
                 }
                 if(mb_type&CANDIDATE_MB_TYPE_INTER4V){
@@ -29,7 +29,7 @@
 #include "dsputil.h"
 #include "bitstream.h"
-#define FRAME_SKIPED 100 ///< return value for header parsers if frame is not coded
+#define FRAME_SKIPPED 100 ///< return value for header parsers if frame is not coded
 enum OutputFormat {
     FMT_MPEG1,
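
For reference, the renamed FRAME_SKIPPED constant is a status code rather than an error: a header parser returns it when the current frame carries no coded data, and the caller then consumes the input without producing a picture. A minimal caller-side sketch of that convention, assuming a hypothetical parse_header() stand-in for the per-codec parsers touched by this patch:

/* Sketch only: parse_header() is hypothetical; the real call sites are the
 * h263/vc9 decode_frame functions changed elsewhere in this patch. */
static int decode_one_frame(MpegEncContext *s, int buf_size)
{
    int ret = parse_header(s);               /* hypothetical parser call */
    if (ret == FRAME_SKIPPED)                /* frame not coded: not an error */
        return get_consumed_bytes(s, buf_size);
    if (ret < 0)                             /* damaged header: real error */
        return -1;
    /* ... decode macroblocks ... */
    return buf_size;
}
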
@@ -203,7 +203,7 @@ struct MpegEncContext;
  */
 typedef struct MotionEstContext{
     AVCodecContext *avctx;
-    int skip; ///< set if ME is skiped for the current MB
+    int skip; ///< set if ME is skipped for the current MB
     int co_located_mv[4][2]; ///< mv from last p frame for direct mode ME
     int direct_basis_mv[4][2];
     uint8_t *scratchpad; ///< data area for the me algo, so that the ME doesnt need to malloc/free
@@ -350,7 +350,7 @@ typedef struct MpegEncContext {
     int ac_pred;
     uint8_t *prev_pict_types; ///< previous picture types in bitstream order, used for mb skip
 #define PREV_PICT_TYPES_BUFFER_SIZE 256
-    int mb_skiped; ///< MUST BE SET only during DECODING
+    int mb_skipped; ///< MUST BE SET only during DECODING
     uint8_t *mbskip_table; /**< used to avoid copy if macroblock skipped (for black regions for example)
                                 and used for b-frame encoding & decoding (contains skip table of next P Frame) */
     uint8_t *mbintra_table; ///< used to avoid setting {ac, dc, cbp}-pred stuff to zero on inter MB decoding
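
As an aside on the renamed field: mb_skipped is the transient per-macroblock flag set while decoding, while mbskip_table records those decisions per picture so that later B/P frames can reuse them. A simplified sketch of how the two relate, assuming a hypothetical parse_mb_is_skipped() helper (the real logic lives in the per-codec decode_mb functions changed in this patch):

/* Illustrative only; mirrors s->current_picture.mbskip_table[mb_xy]= s->mb_skipped
 * from ff_h263_update_motion_val() above. */
static void decode_one_mb(MpegEncContext *s)
{
    const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;

    s->mb_skipped = parse_mb_is_skipped(s);              /* hypothetical helper */
    s->current_picture.mbskip_table[mb_xy] = s->mb_skipped;
    if (s->mb_skipped)
        return;                                          /* reuse pixels from the previous picture */
    /* ... decode motion vectors and coefficients ... */
}
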
@@ -378,7 +378,7 @@ typedef struct MpegEncContext {
     /* motion compensation */
     int unrestricted_mv; ///< mv can point outside of the coded picture
     int h263_long_vectors; ///< use horrible h263v1 long vector mode
-    int decode; ///< if 0 then decoding will be skiped (for encoding b frames for example)
+    int decode; ///< if 0 then decoding will be skipped (for encoding b frames for example)
     DSPContext dsp; ///< pointers for accelerated dsp fucntions
     int f_code; ///< forward MV resolution
@@ -427,7 +427,7 @@ typedef struct MpegEncContext {
     int no_rounding; /**< apply no rounding to motion compensation (MPEG4, msmpeg4, ...)
                           for b-frames rounding mode is allways 0 */
-    int hurry_up; /**< when set to 1 during decoding, b frames will be skiped
+    int hurry_up; /**< when set to 1 during decoding, b frames will be skipped
                        when set to 2 idct/dequant will be skipped too */
     /* macroblock layer */
@@ -438,7 +438,7 @@ typedef struct MpegEncContext {
 #define CANDIDATE_MB_TYPE_INTRA 0x01
 #define CANDIDATE_MB_TYPE_INTER 0x02
 #define CANDIDATE_MB_TYPE_INTER4V 0x04
-#define CANDIDATE_MB_TYPE_SKIPED 0x08
+#define CANDIDATE_MB_TYPE_SKIPPED 0x08
 //#define MB_TYPE_GMC 0x10
 #define CANDIDATE_MB_TYPE_DIRECT 0x10
@@ -1492,7 +1492,7 @@ static int msmpeg4v12_decode_mb(MpegEncContext *s, DCTELEM block[6][64])
         s->mv_type = MV_TYPE_16X16;
         s->mv[0][0][0] = 0;
         s->mv[0][0][1] = 0;
-        s->mb_skiped = 1;
+        s->mb_skipped = 1;
         return 0;
     }
 }
@@ -1580,7 +1580,7 @@ static int msmpeg4v34_decode_mb(MpegEncContext *s, DCTELEM block[6][64])
         s->mv_type = MV_TYPE_16X16;
         s->mv[0][0][0] = 0;
         s->mv[0][0][1] = 0;
-        s->mb_skiped = 1;
+        s->mb_skipped = 1;
         *mb_type_ptr = MB_TYPE_SKIP | MB_TYPE_L0 | MB_TYPE_16x16;
         return 0;
@@ -79,7 +79,7 @@ int ff_rate_control_init(MpegEncContext *s)
         rcc->entry = (RateControlEntry*)av_mallocz(i*sizeof(RateControlEntry));
         rcc->num_entries= i;
-        /* init all to skiped p frames (with b frames we might have a not encoded frame at the end FIXME) */
+        /* init all to skipped p frames (with b frames we might have a not encoded frame at the end FIXME) */
         for(i=0; i<rcc->num_entries; i++){
             RateControlEntry *rce= &rcc->entry[i];
             rce->pict_type= rce->new_pict_type=P_TYPE;
@@ -468,8 +468,8 @@ static int rv20_decode_picture_header(MpegEncContext *s)
             s->time= seq;
             s->pb_time= s->pp_time - (s->last_non_b_time - s->time);
             if(s->pp_time <=s->pb_time || s->pp_time <= s->pp_time - s->pb_time || s->pp_time<=0){
-                av_log(s->avctx, AV_LOG_DEBUG, "messed up order, seeking?, skiping current b frame\n");
-                return FRAME_SKIPED;
+                av_log(s->avctx, AV_LOG_DEBUG, "messed up order, possible from seeking? skipping current b frame\n");
+                return FRAME_SKIPPED;
             }
         }
     }
@@ -3338,7 +3338,7 @@ static int encode_init(AVCodecContext *avctx)
     int plane_index;
     if(avctx->strict_std_compliance >= 0){
-        av_log(avctx, AV_LOG_ERROR, "this codec is under development, files encoded with it wont be decodeable with future versions!!!\n"
+        av_log(avctx, AV_LOG_ERROR, "this codec is under development, files encoded with it may not be decodable with future versions!!!\n"
               "use vstrict=-1 / -strict -1 to use it anyway\n");
         return -1;
     }
@@ -891,7 +891,7 @@ void free_bitplane(BitPlane *bp)
     if (bp->data) av_freep(&bp->data);
 }
-/** Decode rows by checking if they are skiped
+/** Decode rows by checking if they are skipped
  * @param plane Buffer to store decoded bits
  * @param[in] width Width of this buffer
  * @param[in] height Height of this buffer
@@ -910,7 +910,7 @@ static void decode_rowskip(uint8_t* plane, int width, int height, int stride, Ge
     }
 }
-/** Decode columns by checking if they are skiped
+/** Decode columns by checking if they are skipped
  * @param plane Buffer to store decoded bits
  * @param[in] width Width of this buffer
  * @param[in] height Height of this buffer
@@ -1126,14 +1126,14 @@ static int decode_b_picture_primary_header(VC9Context *v)
     if (v->profile == PROFILE_SIMPLE)
     {
         av_log(v->s.avctx, AV_LOG_ERROR, "Found a B frame while in Simple Profile!\n");
-        return FRAME_SKIPED;
+        return FRAME_SKIPPED;
     }
     v->bfraction = vc9_bfraction_lut[get_vlc2(gb, vc9_bfraction_vlc.table,
                                               VC9_BFRACTION_VLC_BITS, 2)];
     if (v->bfraction < -1)
     {
         av_log(v->s.avctx, AV_LOG_ERROR, "Invalid BFRaction\n");
-        return FRAME_SKIPED;
+        return FRAME_SKIPPED;
     }
     else if (!v->bfraction)
     {
@@ -1474,7 +1474,7 @@ static int standard_decode_picture_primary_header(VC9Context *v)
     case B_TYPE: status = decode_b_picture_primary_header(v); break;
     }
-    if (status == FRAME_SKIPED)
+    if (status == FRAME_SKIPPED)
     {
         av_log(v->s.avctx, AV_LOG_INFO, "Skipping frame...\n");
         return status;
@@ -1499,7 +1499,7 @@ static int standard_decode_picture_secondary_header(VC9Context *v)
     case BI_TYPE:
     case I_TYPE: break; //Nothing needed as it's done in the epilog
     }
-    if (status < 0) return FRAME_SKIPED;
+    if (status < 0) return FRAME_SKIPPED;
     /* AC Syntax */
     v->c_ac_table_index = decode012(gb);
@@ -1540,7 +1540,7 @@ static int advanced_decode_picture_primary_header(VC9Context *v)
     }
     type = get_prefix(gb, 0, 4);
-    if (type > 4 || type < 0) return FRAME_SKIPED;
+    if (type > 4 || type < 0) return FRAME_SKIPPED;
     v->s.pict_type = type_table[type];
     av_log(v->s.avctx, AV_LOG_INFO, "AP Frame Type: %i\n", v->s.pict_type);
@@ -1578,7 +1578,7 @@ static int advanced_decode_picture_primary_header(VC9Context *v)
     case I_TYPE: if (decode_i_picture_primary_header(v) < 0) return -1;
     case P_TYPE: if (decode_p_picture_primary_header(v) < 0) return -1;
     case BI_TYPE:
-    case B_TYPE: if (decode_b_picture_primary_header(v) < 0) return FRAME_SKIPED;
+    case B_TYPE: if (decode_b_picture_primary_header(v) < 0) return FRAME_SKIPPED;
     default: return -1;
     }
 }
@@ -1599,7 +1599,7 @@ static int advanced_decode_picture_secondary_header(VC9Context *v)
     case BI_TYPE:
     case I_TYPE: status = decode_i_picture_secondary_header(v); break;
     }
-    if (status<0) return FRAME_SKIPED;
+    if (status<0) return FRAME_SKIPPED;
     /* AC Syntax */
     v->c_ac_table_index = decode012(gb);
@@ -2367,7 +2367,7 @@ static int vc9_decode_frame(AVCodecContext *avctx,
 {
     VC9Context *v = avctx->priv_data;
     MpegEncContext *s = &v->s;
-    int ret = FRAME_SKIPED, len;
+    int ret = FRAME_SKIPPED, len;
     AVFrame *pict = data;
     uint8_t *tmp_buf;
     v->s.avctx = avctx;
@@ -2470,7 +2470,7 @@ static int vc9_decode_frame(AVCodecContext *avctx,
     else
 #endif
         ret= standard_decode_picture_primary_header(v);
-    if (ret == FRAME_SKIPED) return buf_size;
+    if (ret == FRAME_SKIPPED) return buf_size;
     /* skip if the header was thrashed */
     if (ret < 0){
         av_log(s->avctx, AV_LOG_ERROR, "header damaged\n");
@@ -2518,7 +2518,7 @@ static int vc9_decode_frame(AVCodecContext *avctx,
     else
 #endif
         ret = standard_decode_picture_secondary_header(v);
-    if (ret<0) return FRAME_SKIPED; //FIXME Non fatal for now
+    if (ret<0) return FRAME_SKIPPED; //FIXME Non fatal for now
     //We consider the image coded in only one slice
 #if HAS_ADVANCED_PROFILE
@@ -2530,15 +2530,15 @@ static int vc9_decode_frame(AVCodecContext *avctx,
         case P_TYPE: ret = decode_p_mbs(v); break;
         case B_TYPE:
         case BI_TYPE: ret = decode_b_mbs(v); break;
-        default: ret = FRAME_SKIPED;
+        default: ret = FRAME_SKIPPED;
         }
-        if (ret == FRAME_SKIPED) return buf_size; //We ignore for now failures
+        if (ret == FRAME_SKIPPED) return buf_size; //We ignore for now failures
     }
     else
 #endif
     {
         ret = standard_decode_mbs(v);
-        if (ret == FRAME_SKIPED) return buf_size;
+        if (ret == FRAME_SKIPPED) return buf_size;
     }
     ff_er_frame_end(s);
@@ -474,7 +474,7 @@ s->picture_number++; //FIXME ?
 //    return wmv2_decode_j_picture(w); //FIXME
     if(w->j_type){
-        av_log(s->avctx, AV_LOG_ERROR, "J-type picture isnt supported\n");
+        av_log(s->avctx, AV_LOG_ERROR, "J-type picture is not supported\n");
         return -1;
     }
@@ -723,7 +723,7 @@ static int wmv2_decode_mb(MpegEncContext *s, DCTELEM block[6][64])
         s->mv_type = MV_TYPE_16X16;
         s->mv[0][0][0] = 0;
         s->mv[0][0][1] = 0;
-        s->mb_skiped = 1;
+        s->mb_skipped = 1;
         w->hshift=0;
         return 0;
     }
@@ -148,7 +148,7 @@ const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
     }
     //MC doesn't skip blocks
-    s->mb_skiped = 0;
+    s->mb_skipped = 0;
     // do I need to export quant when I could not perform postprocessing?
@@ -579,7 +579,7 @@ static int asf_read_packet(AVFormatContext *s, AVPacket *pkt)
             asf_st->frag_offset = 0;
             if (asf->packet_frag_offset != 0) {
                 url_fskip(pb, asf->packet_frag_size);
-                av_log(s, AV_LOG_INFO, "ff asf parser skiping %db\n", asf->packet_frag_size);
+                av_log(s, AV_LOG_INFO, "ff asf parser skipping %db\n", asf->packet_frag_size);
                 asf->packet_size_left -= asf->packet_frag_size;
                 continue;
             }
@@ -102,7 +102,7 @@ static int dc1394_read_header(AVFormatContext *c, AVFormatParameters * ap)
     /* Now lets prep the hardware */
     dc1394->handle = dc1394_create_handle(0); /* FIXME: gotta have ap->port */
     if (!dc1394->handle) {
-        av_log(c, AV_LOG_ERROR, "Can't aquire dc1394 handle on port %d\n", 0 /* ap->port */);
+        av_log(c, AV_LOG_ERROR, "Can't acquire dc1394 handle on port %d\n", 0 /* ap->port */);
         goto out;
     }
     camera_nodes = dc1394_get_camera_nodes(dc1394->handle, &res, 1);
@@ -931,7 +931,7 @@ static int decode_main_header(NUTContext *nut){
     }
     if(check_checksum(bc)){
-        av_log(s, AV_LOG_ERROR, "Main header checksum missmatch\n");
+        av_log(s, AV_LOG_ERROR, "Main header checksum mismatch\n");
         return -1;
     }
@@ -1012,7 +1012,7 @@ static int decode_stream_header(NUTContext *nut){
         st->codec.channels = get_v(bc);
     }
     if(check_checksum(bc)){
-        av_log(s, AV_LOG_ERROR, "Stream header %d checksum missmatch\n", stream_id);
+        av_log(s, AV_LOG_ERROR, "Stream header %d checksum mismatch\n", stream_id);
         return -1;
     }
     av_set_pts_info(s->streams[stream_id], 60, denom, nom);
@@ -1067,7 +1067,7 @@ static int decode_info_header(NUTContext *nut){
         }
     }
     if(check_checksum(bc)){
-        av_log(s, AV_LOG_ERROR, "Info header checksum missmatch\n");
+        av_log(s, AV_LOG_ERROR, "Info header checksum mismatch\n");
         return -1;
     }
     return 0;