
Factorize the definition of output_filter, previously duplicated in both ffplay.c and ffmpeg.c.
Replace it with a more generic definition that can be shared.

Originally committed as revision 25453 to svn://svn.ffmpeg.org/ffmpeg/trunk
Author: Stefano Sabatini
Commit: f7ead94c69
4 changed files with 58 additions and 75 deletions:
  1. cmdutils.c  +41   -0
  2. cmdutils.h  +11   -0
  3. ffmpeg.c     +3  -47
  4. ffplay.c     +3  -28
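
The commit moves the duplicated video output (sink) filter into cmdutils as a filter named ffsink, configured through an FFSinkContext passed to avfilter_init_filter(). As a minimal usage sketch of the resulting call pattern (not part of the commit; the helper name and error handling are illustrative, mirroring what the ffmpeg.c and ffplay.c hunks below actually do):

    /* Sketch only: open the shared ffsink declared in cmdutils.h and
     * configure its output pixel format via FFSinkContext. */
    #include "cmdutils.h"

    static int open_ffsink(AVFilterContext **sink, enum PixelFormat pix_fmt)
    {
        FFSinkContext ffsink_ctx = { .pix_fmt = pix_fmt };
        int ret;

        /* instantiate the shared sink filter */
        if ((ret = avfilter_open(sink, &ffsink, "out")) < 0)
            return ret;
        /* pass the requested pixel format as the init opaque pointer */
        return avfilter_init_filter(*sink, NULL, &ffsink_ctx);
    }

Because the pixel format travels in FFSinkContext, each tool can keep its own constraint: ffmpeg.c requests the encoder's pix_fmt, while ffplay.c requests PIX_FMT_YUV420P.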

cmdutils.c (+41, -0)

@@ -747,3 +747,44 @@ int64_t guess_correct_pts(PtsCorrectionContext *ctx, int64_t reordered_pts, int6

     return pts;
 }
+
+#if CONFIG_AVFILTER
+
+static int ffsink_init(AVFilterContext *ctx, const char *args, void *opaque)
+{
+    FFSinkContext *priv = ctx->priv;
+
+    if (!opaque)
+        return AVERROR(EINVAL);
+    *priv = *(FFSinkContext *)opaque;
+
+    return 0;
+}
+
+static void null_end_frame(AVFilterLink *inlink) { }
+
+static int ffsink_query_formats(AVFilterContext *ctx)
+{
+    FFSinkContext *priv = ctx->priv;
+    enum PixelFormat pix_fmts[] = { priv->pix_fmt, PIX_FMT_NONE };
+
+    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
+    return 0;
+}
+
+AVFilter ffsink = {
+    .name      = "ffsink",
+    .priv_size = sizeof(FFSinkContext),
+    .init      = ffsink_init,
+
+    .query_formats = ffsink_query_formats,
+
+    .inputs    = (AVFilterPad[]) {{ .name          = "default",
+                                    .type          = AVMEDIA_TYPE_VIDEO,
+                                    .end_frame     = null_end_frame,
+                                    .min_perms     = AV_PERM_READ, },
+                                  { .name = NULL }},
+    .outputs   = (AVFilterPad[]) {{ .name = NULL }},
+};
+
+#endif /* CONFIG_AVFILTER */

cmdutils.h (+11, -0)

@@ -261,4 +261,15 @@ void init_pts_correction(PtsCorrectionContext *ctx);
  */
 int64_t guess_correct_pts(PtsCorrectionContext *ctx, int64_t pts, int64_t dts);
 
+#if CONFIG_AVFILTER
+#include "libavfilter/avfilter.h"
+
+typedef struct {
+    enum PixelFormat pix_fmt;
+} FFSinkContext;
+
+extern AVFilter ffsink;
+
+#endif /* CONFIG_AVFILTER */
+
 #endif /* FFMPEG_CMDUTILS_H */

ffmpeg.c (+3, -47)

@@ -339,34 +339,6 @@ static struct termios oldtty;
 #endif
 
 #if CONFIG_AVFILTER
-typedef struct {
-    int pix_fmt;
-} FilterOutPriv;
-
-
-static int output_init(AVFilterContext *ctx, const char *args, void *opaque)
-{
-    FilterOutPriv *priv = ctx->priv;
-
-    if(!opaque) return -1;
-
-    priv->pix_fmt = *((int *)opaque);
-
-    return 0;
-}
-
-static void output_end_frame(AVFilterLink *link)
-{
-}
-
-static int output_query_formats(AVFilterContext *ctx)
-{
-    FilterOutPriv *priv = ctx->priv;
-    enum PixelFormat pix_fmts[] = { priv->pix_fmt, PIX_FMT_NONE };
-
-    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
-    return 0;
-}
 
 static int get_filtered_video_pic(AVFilterContext *ctx,
                                   AVFilterBufferRef **picref, AVFrame *pic2,
@@ -391,29 +363,13 @@ static int get_filtered_video_pic(AVFilterContext *ctx,
     return 1;
 }
 
-static AVFilter output_filter =
-{
-    .name      = "ffmpeg_output",
-
-    .priv_size = sizeof(FilterOutPriv),
-    .init      = output_init,
-
-    .query_formats = output_query_formats,
-
-    .inputs    = (AVFilterPad[]) {{ .name          = "default",
-                                    .type          = AVMEDIA_TYPE_VIDEO,
-                                    .end_frame     = output_end_frame,
-                                    .min_perms     = AV_PERM_READ, },
-                                  { .name = NULL }},
-    .outputs   = (AVFilterPad[]) {{ .name = NULL }},
-};
-
 static int configure_filters(AVInputStream *ist, AVOutputStream *ost)
 {
     AVFilterContext *last_filter, *filter;
     /** filter graph containing all filters including input & output */
     AVCodecContext *codec = ost->st->codec;
     AVCodecContext *icodec = ist->st->codec;
+    FFSinkContext ffsink_ctx = { .pix_fmt = codec->pix_fmt };
     char args[255];
     int ret;
@@ -421,7 +377,7 @@ static int configure_filters(AVInputStream *ist, AVOutputStream *ost)

     if ((ret = avfilter_open(&ist->input_video_filter, avfilter_get_by_name("buffer"), "src")) < 0)
         return ret;
-    if ((ret = avfilter_open(&ist->output_video_filter, &output_filter, "out")) < 0)
+    if ((ret = avfilter_open(&ist->output_video_filter, &ffsink, "out")) < 0)
         return ret;
 
     snprintf(args, 255, "%d:%d:%d:%d:%d", ist->st->codec->width,
@@ -429,7 +385,7 @@ static int configure_filters(AVInputStream *ist, AVOutputStream *ost)
              ist->st->time_base.num, ist->st->time_base.den);
     if ((ret = avfilter_init_filter(ist->input_video_filter, args, NULL)) < 0)
         return ret;
-    if ((ret = avfilter_init_filter(ist->output_video_filter, NULL, &codec->pix_fmt)) < 0)
+    if ((ret = avfilter_init_filter(ist->output_video_filter, NULL, &ffsink_ctx)) < 0)
         return ret;
 
     /* add input and output filters to the overall graph */


ffplay.c (+3, -28)

@@ -1779,18 +1779,6 @@ static AVFilter input_filter =
                                   { .name = NULL }},
 };
 
-static void output_end_frame(AVFilterLink *link)
-{
-}
-
-static int output_query_formats(AVFilterContext *ctx)
-{
-    enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
-
-    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
-    return 0;
-}
-
 static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
                                     int64_t *pts, AVRational *tb, int64_t *pos)
 {
@@ -1812,20 +1800,6 @@ static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,

     return 1;
 }
-
-static AVFilter output_filter =
-{
-    .name      = "ffplay_output",
-
-    .query_formats = output_query_formats,
-
-    .inputs    = (AVFilterPad[]) {{ .name          = "default",
-                                    .type          = AVMEDIA_TYPE_VIDEO,
-                                    .end_frame     = output_end_frame,
-                                    .min_perms     = AV_PERM_READ, },
-                                  { .name = NULL }},
-    .outputs   = (AVFilterPad[]) {{ .name = NULL }},
-};
 #endif /* CONFIG_AVFILTER */
 
 static int video_thread(void *arg)
@@ -1839,16 +1813,17 @@ static int video_thread(void *arg)
 #if CONFIG_AVFILTER
     int64_t pos;
     char sws_flags_str[128];
+    FFSinkContext ffsink_ctx = { .pix_fmt = PIX_FMT_YUV420P };
     AVFilterContext *filt_src = NULL, *filt_out = NULL;
     AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
     snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
     graph->scale_sws_opts = av_strdup(sws_flags_str);
 
     if (avfilter_open(&filt_src, &input_filter,  "src") < 0) goto the_end;
-    if (avfilter_open(&filt_out, &output_filter, "out") < 0) goto the_end;
+    if (avfilter_open(&filt_out, &ffsink,        "out") < 0) goto the_end;
 
     if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
-    if(avfilter_init_filter(filt_out, NULL, NULL)) goto the_end;
+    if(avfilter_init_filter(filt_out, NULL, &ffsink_ctx)) goto the_end;
 
     if(vfilters) {

