This fixes build failures with -DDEBUG in CPPFLAGS.
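Background: assert() only evaluates its expression when NDEBUG is not defined, and here that is effectively tied to -DDEBUG, so the stale member names in these asserts were never compiled in release builds and only surfaced once the asserts were enabled. The recent refactor embedded the per-frame data in Picture as an AVFrame named f, so fields such as pict_type, reference, type, pts and hwaccel_picture_private are now reached through pic->f. A minimal sketch of the failure mode, using hypothetical struct names rather than the real libavcodec definitions:

/* Illustrative sketch only -- hypothetical struct and field names, not the
 * real libavcodec definitions.  It shows why a stale member name inside
 * assert() surfaces only when the asserts are compiled in. */
#include <assert.h>

typedef struct FrameSketch {
    int pict_type;      /* per-frame fields now live inside the embedded frame */
    int reference;
} FrameSketch;

typedef struct PictureSketch {
    FrameSketch f;      /* frame data embedded as 'f' after the refactor; */
                        /* pict_type/reference no longer exist directly here */
} PictureSketch;

static void check_picture(const PictureSketch *pic, int expected_type)
{
    /* The old form no longer compiles once the field has moved:
     *     assert(pic->pict_type == expected_type);
     * With NDEBUG defined the assert() macro drops its argument before it is
     * ever type-checked, so the stale reference goes unnoticed; an
     * assert-enabled (-DDEBUG) build fails.  The fix is to follow the field
     * into the embedded frame: */
    assert(pic->f.pict_type == expected_type);
}

int main(void)
{
    PictureSketch pic = { .f = { .pict_type = 1, .reference = 3 } };
    check_picture(&pic, 1);
    return 0;
}

The hunks below apply the same pic->X to pic->f.X change to every remaining assert.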
@@ -620,8 +620,8 @@ retry:
     }
     MPV_frame_end(s);
-    assert(s->current_picture.pict_type == s->current_picture_ptr->pict_type);
-    assert(s->current_picture.pict_type == s->pict_type);
+    assert(s->current_picture.f.pict_type == s->current_picture_ptr->f.pict_type);
+    assert(s->current_picture.f.pict_type == s->pict_type);
     *pict= *(AVFrame*)s->current_picture_ptr;
     ff_print_debug_info(s, pict);
@@ -2722,7 +2722,7 @@ static int decode_slice_header(H264Context *h, H264Context *h0){
     if (s0->first_field) {
         assert(s0->current_picture_ptr);
         assert(s0->current_picture_ptr->f.data[0]);
-        assert(s0->current_picture_ptr->reference != DELAYED_PIC_REF);
+        assert(s0->current_picture_ptr->f.reference != DELAYED_PIC_REF);
         /* figure out if we have a complementary field pair */
         if (!FIELD_PICTURE || s->picture_structure == last_pic_structure) {
@@ -172,7 +172,7 @@ static void pred_spatial_direct_motion(H264Context * const h, int *mb_type){
     int mv[2];
     int list;
-    assert(h->ref_list[1][0].reference&3);
+    assert(h->ref_list[1][0].f.reference & 3);
     await_reference_mb_row(h, &h->ref_list[1][0], s->mb_y + !!IS_INTERLACED(*mb_type));
@@ -416,7 +416,7 @@ static void pred_temp_direct_motion(H264Context * const h, int *mb_type){
     unsigned int sub_mb_type;
     int i8, i4;
-    assert(h->ref_list[1][0].reference&3);
+    assert(h->ref_list[1][0].f.reference & 3);
     await_reference_mb_row(h, &h->ref_list[1][0], s->mb_y + !!IS_INTERLACED(*mb_type));
@@ -225,7 +225,7 @@ static int alloc_frame_buffer(MpegEncContext *s, Picture *pic)
     int r;
     if (s->avctx->hwaccel) {
-        assert(!pic->hwaccel_picture_private);
+        assert(!pic->f.hwaccel_picture_private);
         if (s->avctx->hwaccel->priv_data_size) {
             pic->f.hwaccel_picture_private = av_mallocz(s->avctx->hwaccel->priv_data_size);
             if (!pic->f.hwaccel_picture_private) {
@@ -276,7 +276,7 @@ int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared){
     if(shared){
         assert(pic->f.data[0]);
-        assert(pic->type == 0 || pic->type == FF_BUFFER_TYPE_SHARED);
+        assert(pic->f.type == 0 || pic->f.type == FF_BUFFER_TYPE_SHARED);
         pic->f.type = FF_BUFFER_TYPE_SHARED;
     }else{
         assert(!pic->f.data[0]);
@@ -1093,8 +1093,8 @@ static int select_input_picture(MpegEncContext *s){
                 s->input_picture[0]->f.data[i] = NULL;
                 s->input_picture[0]->f.type = 0;
             }else{
-                assert(   s->input_picture[0]->type==FF_BUFFER_TYPE_USER
-                       || s->input_picture[0]->type==FF_BUFFER_TYPE_INTERNAL);
+                assert(   s->input_picture[0]->f.type == FF_BUFFER_TYPE_USER
+                       || s->input_picture[0]->f.type == FF_BUFFER_TYPE_INTERNAL);
                 s->avctx->release_buffer(s->avctx, (AVFrame*)s->input_picture[0]);
             }
@@ -1220,8 +1220,8 @@ no_output_pic:
         }else{
             // input is not a shared pix -> reuse buffer for current_pix
-            assert(   s->reordered_input_picture[0]->type==FF_BUFFER_TYPE_USER
-                   || s->reordered_input_picture[0]->type==FF_BUFFER_TYPE_INTERNAL);
+            assert(   s->reordered_input_picture[0]->f.type == FF_BUFFER_TYPE_USER
+                   || s->reordered_input_picture[0]->f.type == FF_BUFFER_TYPE_INTERNAL);
            s->current_picture_ptr= s->reordered_input_picture[0];
            for(i=0; i<4; i++){
@@ -2757,7 +2757,7 @@ static int estimate_qp(MpegEncContext *s, int dry_run){
 /* must be called before writing the header */
 static void set_frame_distances(MpegEncContext * s){
-    assert(s->current_picture_ptr->pts != AV_NOPTS_VALUE);
+    assert(s->current_picture_ptr->f.pts != AV_NOPTS_VALUE);
     s->time = s->current_picture_ptr->f.pts * s->avctx->time_base.num;
     if(s->pict_type==AV_PICTURE_TYPE_B){