@@ -454,7 +454,7 @@ static int decode_frame(AVCodecContext *avctx,
         }
     }
-    *picture= *(AVFrame*)&a->picture;
+    *picture = a->picture;
     *data_size = sizeof(AVPicture);
     emms_c();
@@ -149,7 +149,7 @@ avs_decode_frame(AVCodecContext * avctx,
         align_get_bits(&change_map);
     }
-    *picture = *(AVFrame *) & avs->picture;
+    *picture = avs->picture;
     *data_size = sizeof(AVPicture);
     return buf_size;
@@ -655,7 +655,7 @@ static int cavs_decode_frame(AVCodecContext * avctx,void *data, int *data_size,
     if (buf_size == 0) {
         if (!s->low_delay && h->DPB[0].f.data[0]) {
             *data_size = sizeof(AVPicture);
-            *picture = *(AVFrame *) &h->DPB[0];
+            *picture = h->DPB[0].f;
         }
         return 0;
     }
@@ -691,12 +691,12 @@ static int cavs_decode_frame(AVCodecContext * avctx,void *data, int *data_size,
             *data_size = sizeof(AVPicture);
             if(h->pic_type != AV_PICTURE_TYPE_B) {
                 if(h->DPB[1].f.data[0]) {
-                    *picture = *(AVFrame *) &h->DPB[1];
+                    *picture = h->DPB[1].f;
                 } else {
                     *data_size = 0;
                 }
             } else
-                *picture = *(AVFrame *) &h->picture;
+                *picture = h->picture.f;
             break;
         case EXT_START_CODE:
             //mpeg_decode_extension(avctx,buf_ptr, input_size);
@@ -624,7 +624,8 @@ retry:
     assert(s->current_picture.f.pict_type == s->current_picture_ptr->f.pict_type);
     assert(s->current_picture.f.pict_type == s->pict_type);
-    *pict= *(AVFrame*)s->current_picture_ptr;
+    *pict = s->current_picture_ptr->f;
     ff_print_debug_info(s, pict);
     *data_size = sizeof(AVFrame);
| @@ -355,7 +355,7 @@ uint64_t time= rdtsc(); | |||||
| if (buf_size == 0) { | if (buf_size == 0) { | ||||
| /* special case for last picture */ | /* special case for last picture */ | ||||
| if (s->low_delay==0 && s->next_picture_ptr) { | if (s->low_delay==0 && s->next_picture_ptr) { | ||||
| *pict= *(AVFrame*)s->next_picture_ptr; | |||||
| *pict = s->next_picture_ptr->f; | |||||
| s->next_picture_ptr= NULL; | s->next_picture_ptr= NULL; | ||||
| *data_size = sizeof(AVFrame); | *data_size = sizeof(AVFrame); | ||||
@@ -712,9 +712,9 @@ intrax8_decoded:
     assert(s->current_picture.f.pict_type == s->current_picture_ptr->f.pict_type);
     assert(s->current_picture.f.pict_type == s->pict_type);
     if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-        *pict= *(AVFrame*)s->current_picture_ptr;
+        *pict = s->current_picture_ptr->f;
     } else if (s->last_picture_ptr != NULL) {
-        *pict= *(AVFrame*)s->last_picture_ptr;
+        *pict = s->last_picture_ptr->f;
     }
     if(s->last_picture_ptr || s->low_delay){
@@ -361,26 +361,14 @@ static void await_references(H264Context *h){
                 nrefs[list]--;
                 if(!FIELD_PICTURE && ref_field_picture){ // frame referencing two fields
-                    ff_thread_await_progress(&ref_pic->f,
-                                             FFMIN((row >> 1) - !(row & 1),
-                                                   pic_height - 1),
-                                             1);
-                    ff_thread_await_progress(&ref_pic->f,
-                                             FFMIN((row >> 1), pic_height - 1),
-                                             0);
+                    ff_thread_await_progress(&ref_pic->f, FFMIN((row >> 1) - !(row & 1), pic_height - 1), 1);
+                    ff_thread_await_progress(&ref_pic->f, FFMIN((row >> 1), pic_height - 1), 0);
                 }else if(FIELD_PICTURE && !ref_field_picture){ // field referencing one field of a frame
-                    ff_thread_await_progress(&ref_pic->f,
-                                             FFMIN(row * 2 + ref_field,
-                                                   pic_height - 1),
-                                             0);
+                    ff_thread_await_progress(&ref_pic->f, FFMIN(row * 2 + ref_field, pic_height - 1), 0);
                 }else if(FIELD_PICTURE){
-                    ff_thread_await_progress(&ref_pic->f,
-                                             FFMIN(row, pic_height - 1),
-                                             ref_field);
+                    ff_thread_await_progress(&ref_pic->f, FFMIN(row, pic_height - 1), ref_field);
                 }else{
-                    ff_thread_await_progress(&ref_pic->f,
-                                             FFMIN(row, pic_height - 1),
-                                             0);
+                    ff_thread_await_progress(&ref_pic->f, FFMIN(row, pic_height - 1), 0);
                 }
             }
         }
@@ -4053,7 +4041,7 @@ static int decode_frame(AVCodecContext *avctx,
         if(out){
             *data_size = sizeof(AVFrame);
-            *pict= *(AVFrame*)out;
+            *pict = out->f;
         }
         return buf_index;
@@ -4087,7 +4075,7 @@ static int decode_frame(AVCodecContext *avctx,
         } else {
             *data_size = sizeof(AVFrame);
-            *pict = *(AVFrame*)h->next_output_pic;
+            *pict = h->next_output_pic->f;
         }
     }
@@ -154,8 +154,7 @@ static void await_reference_mb_row(H264Context * const h, Picture *ref, int mb_y
     //even if pixels aren't deblocked yet
     ff_thread_await_progress(&ref->f,
-                             FFMIN(16 * mb_y >> ref_field_picture,
-                                   ref_height - 1),
+                             FFMIN(16 * mb_y >> ref_field_picture, ref_height - 1),
                              ref_field_picture && ref_field);
 }
@@ -191,7 +191,7 @@ static int ir2_decode_frame(AVCodecContext *avctx,
                       s->picture.data[1], s->picture.linesize[1], ir2_luma_table);
     }
-    *picture= *(AVFrame*)&s->picture;
+    *picture = s->picture;
     *data_size = sizeof(AVPicture);
     return buf_size;
@@ -1915,7 +1915,7 @@ static int slice_end(AVCodecContext *avctx, AVFrame *pict)
         ff_MPV_frame_end(s);
         if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-            *pict = *(AVFrame*)s->current_picture_ptr;
+            *pict = s->current_picture_ptr->f;
             ff_print_debug_info(s, pict);
         } else {
             if (avctx->active_thread_type & FF_THREAD_FRAME)
@@ -1923,7 +1923,7 @@ static int slice_end(AVCodecContext *avctx, AVFrame *pict)
             /* latency of 1 frame for I- and P-frames */
             /* XXX: use another variable than picture_number */
             if (s->last_picture_ptr != NULL) {
-                *pict = *(AVFrame*)s->last_picture_ptr;
+                *pict = s->last_picture_ptr->f;
                 ff_print_debug_info(s, pict);
             }
         }
@@ -2203,7 +2203,7 @@ static int mpeg_decode_frame(AVCodecContext *avctx,
     if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == SEQ_END_CODE)) {
         /* special case for last picture */
         if (s2->low_delay == 0 && s2->next_picture_ptr) {
-            *picture = *(AVFrame*)s2->next_picture_ptr;
+            *picture = s2->next_picture_ptr->f;
             s2->next_picture_ptr = NULL;
             *data_size = sizeof(AVFrame);
@@ -180,7 +180,7 @@ static int pnm_decode_frame(AVCodecContext *avctx, void *data,
         }
         break;
     }
-    *picture = *(AVFrame*)&s->picture;
+    *picture = s->picture;
     *data_size = sizeof(AVPicture);
     return s->bytestream - s->bytestream_start;
@@ -677,9 +677,9 @@ static int rv10_decode_frame(AVCodecContext *avctx,
         ff_MPV_frame_end(s);
         if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-            *pict= *(AVFrame*)s->current_picture_ptr;
+            *pict = s->current_picture_ptr->f;
         } else if (s->last_picture_ptr != NULL) {
-            *pict= *(AVFrame*)s->last_picture_ptr;
+            *pict = s->last_picture_ptr->f;
         }
         if(s->last_picture_ptr || s->low_delay){
@@ -1656,7 +1656,7 @@ int ff_rv34_decode_frame(AVCodecContext *avctx,
     if (buf_size == 0) {
         /* special case for last picture */
         if (s->low_delay==0 && s->next_picture_ptr) {
-            *pict = *(AVFrame*)s->next_picture_ptr;
+            *pict = s->next_picture_ptr->f;
             s->next_picture_ptr = NULL;
             *data_size = sizeof(AVFrame);
@@ -1743,9 +1743,9 @@ int ff_rv34_decode_frame(AVCodecContext *avctx,
             ff_thread_report_progress(&s->current_picture_ptr->f, INT_MAX, 0);
         if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-            *pict = *(AVFrame*)s->current_picture_ptr;
+            *pict = s->current_picture_ptr->f;
         } else if (s->last_picture_ptr != NULL) {
-            *pict = *(AVFrame*)s->last_picture_ptr;
+            *pict = s->last_picture_ptr->f;
         }
         if(s->last_picture_ptr || s->low_delay){
@@ -735,7 +735,7 @@ static int svq1_decode_frame(AVCodecContext *avctx,
         }
     }
-    *pict = *(AVFrame*)&s->current_picture;
+    *pict = s->current_picture.f;
     ff_MPV_frame_end(s);
@@ -956,7 +956,7 @@ static int svq3_decode_frame(AVCodecContext *avctx,
     /* special case for last picture */
     if (buf_size == 0) {
         if (s->next_picture_ptr && !s->low_delay) {
-            *(AVFrame *) data = *(AVFrame *) &s->next_picture;
+            *(AVFrame *) data = s->next_picture.f;
             s->next_picture_ptr = NULL;
             *data_size = sizeof(AVFrame);
         }
@@ -1076,9 +1076,9 @@ static int svq3_decode_frame(AVCodecContext *avctx,
     ff_MPV_frame_end(s);
     if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-        *(AVFrame *) data = *(AVFrame *) &s->current_picture;
+        *(AVFrame *) data = s->current_picture.f;
     } else {
-        *(AVFrame *) data = *(AVFrame *) &s->last_picture;
+        *(AVFrame *) data = s->last_picture.f;
     }
     /* Do not output the last pic after seeking. */
@@ -248,7 +248,7 @@ static int decode_frame(AVCodecContext *avctx,
         }
     }
-    *picture= *(AVFrame*)&s->picture;
+    *picture = s->picture;
     *data_size = sizeof(AVPicture);
     return avpkt->size;
@@ -607,7 +607,7 @@ static int decode_frame(AVCodecContext *avctx,
             src += s->picture.linesize[0];
         }
     }
-    *picture= *(AVFrame*)&s->picture;
+    *picture = s->picture;
     *data_size = sizeof(AVPicture);
     return buf_size;
@@ -5449,7 +5449,7 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
     if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
         /* special case for last picture */
         if (s->low_delay == 0 && s->next_picture_ptr) {
-            *pict = *(AVFrame*)s->next_picture_ptr;
+            *pict = s->next_picture_ptr->f;
             s->next_picture_ptr = NULL;
             *data_size = sizeof(AVFrame);
@@ -5755,9 +5755,9 @@ image:
         *data_size = sizeof(AVFrame);
     } else {
         if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
-            *pict = *(AVFrame*)s->current_picture_ptr;
+            *pict = s->current_picture_ptr->f;
         } else if (s->last_picture_ptr != NULL) {
-            *pict = *(AVFrame*)s->last_picture_ptr;
+            *pict = s->last_picture_ptr->f;
         }
         if (s->last_picture_ptr || s->low_delay) {
             *data_size = sizeof(AVFrame);
@@ -111,7 +111,7 @@ static int decode_frame(AVCodecContext *avctx,
         }
     }
-    *picture= *(AVFrame*)&a->picture;
+    *picture = a->picture;
     *data_size = sizeof(AVPicture);
     return buf_size;