@@ -323,9 +323,10 @@ static int request_frame(AVFilterLink *link)
     switch (link->type) {
     case AVMEDIA_TYPE_VIDEO:
-        ff_start_frame(link, avfilter_ref_buffer(buf, ~0));
-        ff_draw_slice(link, 0, link->h, 1);
-        ff_end_frame(link);
+        if ((ret = ff_start_frame(link, avfilter_ref_buffer(buf, ~0))) < 0 ||
+            (ret = ff_draw_slice(link, 0, link->h, 1)) < 0 ||
+            (ret = ff_end_frame(link)) < 0)
+            goto fail;
         break;
     case AVMEDIA_TYPE_AUDIO:
         ret = ff_filter_samples(link, avfilter_ref_buffer(buf, ~0));
@@ -334,6 +335,7 @@ static int request_frame(AVFilterLink *link)
         return AVERROR(EINVAL);
     }
+fail:
     avfilter_unref_buffer(buf);
     return ret;
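All of these hunks apply the same recipe: ff_start_frame(), ff_draw_slice() and ff_end_frame() now report errors, the caller chains them with short-circuiting assignments to ret, and any buffer reference it created is still released on the failure path. Below is a minimal, self-contained sketch of that control flow; the push_*, send_frame and release_ref names are made-up stubs for illustration only, not libavfilter functions.

    #include <stdio.h>

    /* Stubs standing in for ff_start_frame()/ff_draw_slice()/ff_end_frame():
     * each returns 0 on success or a negative error code. */
    static int push_start(void) { return 0; }
    static int push_slice(void) { return -1; }   /* simulate a failure at this stage */
    static int push_end(void)   { return 0; }

    /* Stand-in for avfilter_unref_buffer(): releases the reference we created. */
    static void release_ref(const char *what)
    {
        printf("released %s\n", what);
    }

    /* Stop at the first failing stage, but always release the reference,
     * mirroring the goto-fail structure used in the hunks above and below. */
    static int send_frame(void)
    {
        int ret;

        if ((ret = push_start()) < 0 ||
            (ret = push_slice()) < 0 ||
            (ret = push_end())   < 0)
            goto fail;

        ret = 0;
    fail:
        release_ref("frame reference");
        return ret;
    }

    int main(void)
    {
        printf("send_frame() -> %d\n", send_frame());
        return 0;
    }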
@@ -241,9 +241,11 @@ static int request_frame(AVFilterLink *outlink)
      * so we don't have to worry about dereferencing it ourselves. */
     switch (outlink->type) {
     case AVMEDIA_TYPE_VIDEO:
-        ff_start_frame(outlink, fifo->root.next->buf);
-        ff_draw_slice (outlink, 0, outlink->h, 1);
-        ff_end_frame  (outlink);
+        if ((ret = ff_start_frame(outlink, fifo->root.next->buf)) < 0 ||
+            (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
+            (ret = ff_end_frame(outlink)) < 0)
+            return ret;
         queue_pop(fifo);
         break;
     case AVMEDIA_TYPE_AUDIO:
@@ -143,9 +143,11 @@ static int request_frame(AVFilterLink *outlink)
         buf->pts = av_rescale_q(s->first_pts, ctx->inputs[0]->time_base,
                                 outlink->time_base) + s->frames_out;
-        ff_start_frame(outlink, buf);
-        ff_draw_slice(outlink, 0, outlink->h, 1);
-        ff_end_frame(outlink);
+        if ((ret = ff_start_frame(outlink, buf)) < 0 ||
+            (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
+            (ret = ff_end_frame(outlink)) < 0)
+            return ret;
         s->frames_out++;
     }
     return 0;
@@ -231,9 +233,13 @@ static int end_frame(AVFilterLink *inlink)
             buf_out->pts = av_rescale_q(s->first_pts, inlink->time_base,
                                         outlink->time_base) + s->frames_out;
-            ff_start_frame(outlink, buf_out);
-            ff_draw_slice(outlink, 0, outlink->h, 1);
-            ff_end_frame(outlink);
+            if ((ret = ff_start_frame(outlink, buf_out)) < 0 ||
+                (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
+                (ret = ff_end_frame(outlink)) < 0) {
+                avfilter_unref_bufferp(&buf);
+                return ret;
+            }
             s->frames_out++;
         }
         flush_fifo(s->fifo);
@@ -438,18 +438,28 @@ static int source_request_frame(AVFilterLink *outlink)
 {
     Frei0rContext *frei0r = outlink->src->priv;
     AVFilterBufferRef *picref = ff_get_video_buffer(outlink, AV_PERM_WRITE, outlink->w, outlink->h);
+    int ret;
     picref->video->pixel_aspect = (AVRational) {1, 1};
     picref->pts = frei0r->pts++;
     picref->pos = -1;
-    ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0));
+    ret = ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0));
+    if (ret < 0)
+        goto fail;
     frei0r->update(frei0r->instance, av_rescale_q(picref->pts, frei0r->time_base, (AVRational){1,1000}),
                    NULL, (uint32_t *)picref->data[0]);
-    ff_draw_slice(outlink, 0, outlink->h, 1);
-    ff_end_frame(outlink);
+    ret = ff_draw_slice(outlink, 0, outlink->h, 1);
+    if (ret < 0)
+        goto fail;
+    ret = ff_end_frame(outlink);
+fail:
     avfilter_unref_buffer(picref);
-    return 0;
+    return ret;
 }
 AVFilter avfilter_vsrc_frei0r_src = {
@@ -279,12 +279,15 @@ static int request_frame(AVFilterLink *outlink)
     if (av_fifo_size(select->pending_frames)) {
         AVFilterBufferRef *picref;
+        int ret;
         av_fifo_generic_read(select->pending_frames, &picref, sizeof(picref), NULL);
-        ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0));
-        ff_draw_slice(outlink, 0, outlink->h, 1);
-        ff_end_frame(outlink);
+        if ((ret = ff_start_frame(outlink, avfilter_ref_buffer(picref, ~0))) < 0 ||
+            (ret = ff_draw_slice(outlink, 0, outlink->h, 1)) < 0 ||
+            (ret = ff_end_frame(outlink)) < 0);
         avfilter_unref_buffer(picref);
-        return 0;
+        return ret;
     }
     while (!select->select) {
@@ -157,11 +157,11 @@ static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w,
     return picref;
 }
-static void return_frame(AVFilterContext *ctx, int is_second)
+static int return_frame(AVFilterContext *ctx, int is_second)
 {
     YADIFContext *yadif = ctx->priv;
     AVFilterLink *link= ctx->outputs[0];
-    int tff;
+    int tff, ret;
     if (yadif->parity == -1) {
         tff = yadif->cur->video->interlaced ?
@@ -193,12 +193,16 @@ static void return_frame(AVFilterContext *ctx, int is_second)
         } else {
             yadif->out->pts = AV_NOPTS_VALUE;
         }
-        ff_start_frame(ctx->outputs[0], yadif->out);
+        ret = ff_start_frame(ctx->outputs[0], yadif->out);
+        if (ret < 0)
+            return ret;
     }
-    ff_draw_slice(ctx->outputs[0], 0, link->h, 1);
-    ff_end_frame(ctx->outputs[0]);
+    if ((ret = ff_draw_slice(ctx->outputs[0], 0, link->h, 1)) < 0 ||
+        (ret = ff_end_frame(ctx->outputs[0])) < 0)
+        return ret;
     yadif->frame_pending = (yadif->mode&1) && !is_second;
+    return 0;
 }
 static int start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
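Because return_frame() changes from void to int here, every call site inside the yadif filter has to forward the new return value instead of dropping it. The sketch below shows that caller-side propagation with made-up stubs (return_frame_stub, request_frame_stub); it only illustrates the idea, not the actual yadif call sites.

    #include <stdio.h>

    /* Made-up stand-in for return_frame() above: 0 on success, negative on error. */
    static int return_frame_stub(int is_second)
    {
        return is_second ? -1 : 0;   /* simulate a failure on the second field */
    }

    /* Illustrative caller: once return_frame() reports errors, the caller
     * must check and propagate the value instead of ignoring it. */
    static int request_frame_stub(void)
    {
        int ret;

        if ((ret = return_frame_stub(0)) < 0)
            return ret;
        return return_frame_stub(1);
    }

    int main(void)
    {
        printf("request_frame_stub() -> %d\n", request_frame_stub());
        return 0;
    }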
@@ -142,19 +142,29 @@ static int color_request_frame(AVFilterLink *link)
 {
     ColorContext *color = link->src->priv;
     AVFilterBufferRef *picref = ff_get_video_buffer(link, AV_PERM_WRITE, color->w, color->h);
+    int ret;
     picref->video->pixel_aspect = (AVRational) {1, 1};
     picref->pts = color->pts++;
     picref->pos = -1;
-    ff_start_frame(link, avfilter_ref_buffer(picref, ~0));
+    ret = ff_start_frame(link, avfilter_ref_buffer(picref, ~0));
+    if (ret < 0)
+        goto fail;
     ff_draw_rectangle(picref->data, picref->linesize,
                       color->line, color->line_step, color->hsub, color->vsub,
                       0, 0, color->w, color->h);
-    ff_draw_slice(link, 0, color->h, 1);
-    ff_end_frame(link);
+    ret = ff_draw_slice(link, 0, color->h, 1);
+    if (ret < 0)
+        goto fail;
+    ret = ff_end_frame(link);
+fail:
     avfilter_unref_buffer(picref);
-    return 0;
+    return ret;
 }
 AVFilter avfilter_vsrc_color = {
@@ -289,13 +289,20 @@ static int request_frame(AVFilterLink *outlink)
         return ret;
     outpicref = avfilter_ref_buffer(movie->picref, ~0);
-    ff_start_frame(outlink, outpicref);
-    ff_draw_slice(outlink, 0, outlink->h, 1);
-    ff_end_frame(outlink);
+    ret = ff_start_frame(outlink, outpicref);
+    if (ret < 0)
+        goto fail;
+    ret = ff_draw_slice(outlink, 0, outlink->h, 1);
+    if (ret < 0)
+        goto fail;
+    ret = ff_end_frame(outlink);
+fail:
     avfilter_unref_buffer(movie->picref);
     movie->picref = NULL;
-    return 0;
+    return ret;
 }
 AVFilter avfilter_vsrc_movie = {
@@ -130,6 +130,7 @@ static int request_frame(AVFilterLink *outlink)
 {
     TestSourceContext *test = outlink->src->priv;
     AVFilterBufferRef *picref;
+    int ret;
     if (test->max_pts >= 0 && test->pts > test->max_pts)
         return AVERROR_EOF;
@@ -143,9 +144,10 @@ static int request_frame(AVFilterLink *outlink)
     test->nb_frame++;
     test->fill_picture_fn(outlink->src, picref);
-    ff_start_frame(outlink, picref);
-    ff_draw_slice(outlink, 0, test->h, 1);
-    ff_end_frame(outlink);
+    if ((ret = ff_start_frame(outlink, picref)) < 0 ||
+        (ret = ff_draw_slice(outlink, 0, test->h, 1)) < 0 ||
+        (ret = ff_end_frame(outlink)) < 0)
+        return ret;
     return 0;
 }