1
0
mirror of https://github.com/FFmpeg/FFmpeg.git synced 2024-12-23 12:43:46 +02:00

Merge remote-tracking branch 'cigaes/master'

* cigaes/master:
  lavfi/dualinput: reimplement on top of framesync.
  lavfi: add an API to synchronize multiple video inputs.

Merged-by: Michael Niedermayer <michaelni@gmx.at>
This commit is contained in:
Michael Niedermayer 2013-09-23 15:04:05 +02:00
commit 3f3867ca27
9 changed files with 688 additions and 113 deletions

View File

@ -113,7 +113,7 @@ OBJS-$(CONFIG_ALPHAMERGE_FILTER) += vf_alphamerge.o
OBJS-$(CONFIG_BBOX_FILTER) += bbox.o vf_bbox.o
OBJS-$(CONFIG_BLACKDETECT_FILTER) += vf_blackdetect.o
OBJS-$(CONFIG_BLACKFRAME_FILTER) += vf_blackframe.o
OBJS-$(CONFIG_BLEND_FILTER) += vf_blend.o dualinput.o
OBJS-$(CONFIG_BLEND_FILTER) += vf_blend.o dualinput.o framesync.o
OBJS-$(CONFIG_BOXBLUR_FILTER) += vf_boxblur.o
OBJS-$(CONFIG_COLORBALANCE_FILTER) += vf_colorbalance.o
OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER) += vf_colorchannelmixer.o
@ -141,7 +141,7 @@ OBJS-$(CONFIG_FPS_FILTER) += vf_fps.o
OBJS-$(CONFIG_FREI0R_FILTER) += vf_frei0r.o
OBJS-$(CONFIG_GEQ_FILTER) += vf_geq.o
OBJS-$(CONFIG_GRADFUN_FILTER) += vf_gradfun.o
OBJS-$(CONFIG_HALDCLUT_FILTER) += vf_lut3d.o dualinput.o
OBJS-$(CONFIG_HALDCLUT_FILTER) += vf_lut3d.o dualinput.o framesync.o
OBJS-$(CONFIG_HFLIP_FILTER) += vf_hflip.o
OBJS-$(CONFIG_HISTEQ_FILTER) += vf_histeq.o
OBJS-$(CONFIG_HISTOGRAM_FILTER) += vf_histogram.o
@ -165,7 +165,7 @@ OBJS-$(CONFIG_NOISE_FILTER) += vf_noise.o
OBJS-$(CONFIG_NULL_FILTER) += vf_null.o
OBJS-$(CONFIG_OCV_FILTER) += vf_libopencv.o
OBJS-$(CONFIG_OPENCL) += deshake_opencl.o unsharp_opencl.o
OBJS-$(CONFIG_OVERLAY_FILTER) += vf_overlay.o dualinput.o
OBJS-$(CONFIG_OVERLAY_FILTER) += vf_overlay.o dualinput.o framesync.o
OBJS-$(CONFIG_OWDENOISE_FILTER) += vf_owdenoise.o
OBJS-$(CONFIG_PAD_FILTER) += vf_pad.o
OBJS-$(CONFIG_PERMS_FILTER) += f_perms.o
@ -173,7 +173,7 @@ OBJS-$(CONFIG_PERSPECTIVE_FILTER) += vf_perspective.o
OBJS-$(CONFIG_PHASE_FILTER) += vf_phase.o
OBJS-$(CONFIG_PIXDESCTEST_FILTER) += vf_pixdesctest.o
OBJS-$(CONFIG_PP_FILTER) += vf_pp.o
OBJS-$(CONFIG_PSNR_FILTER) += vf_psnr.o dualinput.o
OBJS-$(CONFIG_PSNR_FILTER) += vf_psnr.o dualinput.o framesync.o
OBJS-$(CONFIG_PULLUP_FILTER) += vf_pullup.o
OBJS-$(CONFIG_REMOVELOGO_FILTER) += bbox.o lswsutils.o lavfutils.o vf_removelogo.o
OBJS-$(CONFIG_ROTATE_FILTER) += vf_rotate.o

View File

@ -16,144 +16,75 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#define MAIN 0
#define SECOND 1
#include "dualinput.h"
#include "libavutil/timestamp.h"
static int try_filter_frame(FFDualInputContext *s,
AVFilterContext *ctx, AVFrame *mainpic)
static int process_frame(FFFrameSync *fs)
{
int ret;
AVFilterContext *ctx = fs->parent;
FFDualInputContext *s = fs->opaque;
AVFrame *mainpic = NULL, *secondpic = NULL;
int ret = 0;
/* Discard obsolete second frames: if there is a next second frame with pts
* before the main frame, we can drop the current second. */
while (1) {
AVFrame *next_overpic = ff_bufqueue_peek(&s->queue[SECOND], 0);
if (!next_overpic && s->second_eof && !s->repeatlast) {
av_frame_free(&s->second_frame);
break;
}
if (!next_overpic || av_compare_ts(next_overpic->pts, ctx->inputs[SECOND]->time_base,
mainpic->pts, ctx->inputs[MAIN]->time_base) > 0)
break;
ff_bufqueue_get(&s->queue[SECOND]);
av_frame_free(&s->second_frame);
s->second_frame = next_overpic;
if ((ret = ff_framesync_get_frame(&s->fs, 0, &mainpic, 1)) < 0 ||
(ret = ff_framesync_get_frame(&s->fs, 1, &secondpic, 0)) < 0) {
av_frame_free(&mainpic);
return ret;
}
/* If there is no next frame and no EOF and the second frame is before
* the main frame, we can not know yet if it will be superseded. */
if (!s->queue[SECOND].available && !s->second_eof &&
(!s->second_frame || av_compare_ts(s->second_frame->pts, ctx->inputs[SECOND]->time_base,
mainpic->pts, ctx->inputs[MAIN]->time_base) < 0))
return AVERROR(EAGAIN);
/* At this point, we know that the current second frame extends to the
* time of the main frame. */
av_dlog(ctx, "main_pts:%s main_pts_time:%s",
av_ts2str(mainpic->pts), av_ts2timestr(mainpic->pts, &ctx->inputs[MAIN]->time_base));
if (s->second_frame)
av_dlog(ctx, " second_pts:%s second_pts_time:%s",
av_ts2str(s->second_frame->pts), av_ts2timestr(s->second_frame->pts, &ctx->inputs[SECOND]->time_base));
av_dlog(ctx, "\n");
if (s->second_frame && !ctx->is_disabled)
mainpic = s->process(ctx, mainpic, s->second_frame);
av_assert0(mainpic);
mainpic->pts = av_rescale_q(mainpic->pts, s->fs.time_base, ctx->outputs[0]->time_base);
if (secondpic && !ctx->is_disabled)
mainpic = s->process(ctx, mainpic, secondpic);
ret = ff_filter_frame(ctx->outputs[0], mainpic);
av_assert1(ret != AVERROR(EAGAIN));
s->frame_requested = 0;
return ret;
}
static int try_filter_next_frame(FFDualInputContext *s, AVFilterContext *ctx)
int ff_dualinput_init(AVFilterContext *ctx, FFDualInputContext *s)
{
AVFrame *next_mainpic = ff_bufqueue_peek(&s->queue[MAIN], 0);
int ret;
FFFrameSyncIn *in = s->fs.in;
if (!next_mainpic)
return AVERROR(EAGAIN);
if ((ret = try_filter_frame(s, ctx, next_mainpic)) == AVERROR(EAGAIN))
return ret;
ff_bufqueue_get(&s->queue[MAIN]);
return ret;
}
ff_framesync_init(&s->fs, ctx, 2);
s->fs.opaque = s;
s->fs.on_event = process_frame;
in[0].time_base = ctx->inputs[0]->time_base;
in[1].time_base = ctx->inputs[1]->time_base;
in[0].sync = 2;
in[0].before = EXT_STOP;
in[0].after = EXT_INFINITY;
in[1].sync = 1;
in[1].before = EXT_NULL;
in[1].after = EXT_INFINITY;
static int flush_frames(FFDualInputContext *s, AVFilterContext *ctx)
{
int ret;
if (s->shortest)
in[1].after = EXT_STOP;
if (!s->repeatlast) {
in[0].after = EXT_STOP;
in[1].sync = 0;
}
while (!(ret = try_filter_next_frame(s, ctx)));
return ret == AVERROR(EAGAIN) ? 0 : ret;
return ff_framesync_configure(&s->fs);
}
int ff_dualinput_filter_frame_main(FFDualInputContext *s,
AVFilterLink *inlink, AVFrame *in)
{
AVFilterContext *ctx = inlink->dst;
int ret;
if ((ret = flush_frames(s, ctx)) < 0)
return ret;
if ((ret = try_filter_frame(s, ctx, in)) < 0) {
if (ret != AVERROR(EAGAIN))
return ret;
ff_bufqueue_add(ctx, &s->queue[MAIN], in);
}
if (!s->second_frame)
return 0;
flush_frames(s, ctx);
return 0;
return ff_framesync_filter_frame(&s->fs, inlink, in);
}
int ff_dualinput_filter_frame_second(FFDualInputContext *s,
AVFilterLink *inlink, AVFrame *in)
{
AVFilterContext *ctx = inlink->dst;
int ret;
if ((ret = flush_frames(s, ctx)) < 0)
return ret;
ff_bufqueue_add(ctx, &s->queue[SECOND], in);
ret = try_filter_next_frame(s, ctx);
return ret == AVERROR(EAGAIN) ? 0 : ret;
return ff_framesync_filter_frame(&s->fs, inlink, in);
}
int ff_dualinput_request_frame(FFDualInputContext *s, AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
int input, ret;
if (!try_filter_next_frame(s, ctx))
return 0;
s->frame_requested = 1;
while (s->frame_requested) {
/* TODO if we had a frame duration, we could guess more accurately */
input = !s->second_eof && (s->queue[MAIN].available ||
s->queue[SECOND].available < 2) ?
SECOND : MAIN;
ret = ff_request_frame(ctx->inputs[input]);
/* EOF on main is reported immediately */
if (ret == AVERROR_EOF && input == SECOND) {
s->second_eof = 1;
if (s->shortest)
return ret;
if ((ret = try_filter_next_frame(s, ctx)) != AVERROR(EAGAIN))
return ret;
ret = 0; /* continue requesting frames on main */
}
if (ret < 0)
return ret;
}
return 0;
return ff_framesync_request_frame(&s->fs, outlink);
}
void ff_dualinput_uninit(FFDualInputContext *s)
{
av_frame_free(&s->second_frame);
ff_bufqueue_discard_all(&s->queue[MAIN]);
ff_bufqueue_discard_all(&s->queue[SECOND]);
ff_framesync_uninit(&s->fs);
}

View File

@ -26,18 +26,20 @@
#include <stdint.h>
#include "bufferqueue.h"
#include "framesync.h"
#include "internal.h"
typedef struct {
FFFrameSync fs;
FFFrameSyncIn second_input; /* must be immediately after fs */
uint8_t frame_requested;
uint8_t second_eof;
AVFrame *second_frame;
struct FFBufQueue queue[2];
AVFrame *(*process)(AVFilterContext *ctx, AVFrame *main, const AVFrame *second);
int shortest; ///< terminate stream when the second input terminates
int repeatlast; ///< repeat last second frame
} FFDualInputContext;
int ff_dualinput_init(AVFilterContext *ctx, FFDualInputContext *s);
int ff_dualinput_filter_frame_main(FFDualInputContext *s, AVFilterLink *inlink, AVFrame *in);
int ff_dualinput_filter_frame_second(FFDualInputContext *s, AVFilterLink *inlink, AVFrame *in);
int ff_dualinput_request_frame(FFDualInputContext *s, AVFilterLink *outlink);

329
libavfilter/framesync.c Normal file
View File

@ -0,0 +1,329 @@
/*
* Copyright (c) 2013 Nicolas George
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/avassert.h"
#include "avfilter.h"
#include "bufferqueue.h"
#include "framesync.h"
#include "internal.h"
#define OFFSET(member) offsetof(FFFrameSync, member)
/* AVClass item_name callback: every framesync instance reports the same
 * fixed name in log messages. The context pointer is not consulted. */
static const char *framesync_name(void *ptr)
{
    static const char name[] = "framesync";
    return name;
}
/* AVClass used only for logging: no options, and log messages are
 * forwarded to the owning filter through parent_log_context_offset
 * (OFFSET(parent) points at FFFrameSync.parent). */
static const AVClass framesync_class = {
    .version                   = LIBAVUTIL_VERSION_INT,
    .class_name                = "framesync",
    .item_name                 = framesync_name,
    .category                  = AV_CLASS_CATEGORY_FILTER,
    .option                    = NULL,
    .parent_log_context_offset = OFFSET(parent),
};
/* Values for FFFrameSyncIn.state: position of an input relative to its
 * own stream. */
enum {
    STATE_BOF,  /* before the first frame */
    STATE_RUN,  /* a current frame is available */
    STATE_EOF,  /* after the last frame */
};
/* Initialize the frame sync structure; the caller is expected to have
 * zeroed it already (see header contract). Only the log class, the log
 * parent and the input count are set here; the rest is configured by
 * ff_framesync_configure(). */
void ff_framesync_init(FFFrameSync *fs, void *parent, unsigned nb_in)
{
    fs->class  = &framesync_class;
    fs->parent = parent;
    fs->nb_in  = nb_in;
}
/* Recompute the global sync level: the highest sync value among inputs
 * that have not reached EOF. The level can only decrease over time
 * (asserted). When it reaches 0, no remaining input can drive frame
 * events, so the whole context is flagged EOF. */
static void framesync_sync_level_update(FFFrameSync *fs)
{
    unsigned i, level = 0;

    for (i = 0; i < fs->nb_in; i++)
        if (fs->in[i].state != STATE_EOF)
            level = FFMAX(level, fs->in[i].sync);
    av_assert0(level <= fs->sync_level);
    if (level < fs->sync_level)
        av_log(fs, AV_LOG_VERBOSE, "Sync level %u\n", level);
    if (level)
        fs->sync_level = level;
    else
        fs->eof = 1;
}
int ff_framesync_configure(FFFrameSync *fs)
{
    unsigned i;
    int64_t gcd, lcm;

    /* If the caller did not force an output time base, derive one that can
     * represent all sync'ed inputs exactly: accumulate the LCM of the
     * denominators and the GCD of the numerators. If the LCM grows past
     * AV_TIME_BASE / 2, give up on exactness and fall back to the
     * standard 1/AV_TIME_BASE. */
    if (!fs->time_base.num) {
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].sync) {
                if (fs->time_base.num) {
                    gcd = av_gcd(fs->time_base.den, fs->in[i].time_base.den);
                    lcm = (fs->time_base.den / gcd) * fs->in[i].time_base.den;
                    if (lcm < AV_TIME_BASE / 2) {
                        fs->time_base.den = lcm;
                        fs->time_base.num = av_gcd(fs->time_base.num,
                                                   fs->in[i].time_base.num);
                    } else {
                        fs->time_base.num = 1;
                        fs->time_base.den = AV_TIME_BASE;
                        break;
                    }
                } else {
                    /* first sync'ed input: start from its time base */
                    fs->time_base = fs->in[i].time_base;
                }
            }
        }
        /* No input had sync set: there is nothing to derive a base from. */
        if (!fs->time_base.num) {
            av_log(fs, AV_LOG_ERROR, "Impossible to set time base\n");
            return AVERROR(EINVAL);
        }
        av_log(fs, AV_LOG_VERBOSE, "Selected %d/%d time base\n",
               fs->time_base.num, fs->time_base.den);
    }

    /* Reset per-input timestamps and compute the initial sync level. */
    for (i = 0; i < fs->nb_in; i++)
        fs->in[i].pts = fs->in[i].pts_next = AV_NOPTS_VALUE;
    fs->sync_level = UINT_MAX;
    framesync_sync_level_update(fs);

    return 0;
}
/* Core state machine: advance until either a frame event is ready
 * (fs->frame_ready), an input frame is needed (fs->in_request tells
 * which), or global EOF is reached (fs->eof). */
static void framesync_advance(FFFrameSync *fs)
{
    int latest;
    unsigned i;
    int64_t pts;

    if (fs->eof)
        return;
    while (!fs->frame_ready) {
        /* Among inputs with no pending "next" frame, pick the one whose
         * current pts is earliest: it must supply a frame before time can
         * move forward. */
        latest = -1;
        for (i = 0; i < fs->nb_in; i++) {
            if (!fs->in[i].have_next) {
                if (latest < 0 || fs->in[i].pts < fs->in[latest].pts)
                    latest = i;
            }
        }
        if (latest >= 0) {
            fs->in_request = latest;
            break;
        }

        /* Every input has a next frame queued: the next event happens at
         * the smallest upcoming pts. */
        pts = fs->in[0].pts_next;
        for (i = 1; i < fs->nb_in; i++)
            if (fs->in[i].pts_next < pts)
                pts = fs->in[i].pts_next;
        if (pts == INT64_MAX) {
            /* all inputs are at EOF (EOF is injected with pts INT64_MAX) */
            fs->eof = 1;
            break;
        }
        for (i = 0; i < fs->nb_in; i++) {
            /* Promote "next" to "current" on inputs reaching this pts;
             * also on BOF inputs with before == EXT_INFINITY, whose first
             * frame is extended backwards in time. */
            if (fs->in[i].pts_next == pts ||
                (fs->in[i].before == EXT_INFINITY &&
                 fs->in[i].state == STATE_BOF)) {
                av_frame_free(&fs->in[i].frame);
                fs->in[i].frame      = fs->in[i].frame_next;
                fs->in[i].pts        = fs->in[i].pts_next;
                fs->in[i].frame_next = NULL;
                fs->in[i].pts_next   = AV_NOPTS_VALUE;
                fs->in[i].have_next  = 0;
                fs->in[i].state      = fs->in[i].frame ? STATE_RUN : STATE_EOF;
                if (fs->in[i].sync == fs->sync_level && fs->in[i].frame)
                    fs->frame_ready = 1;
                if (fs->in[i].state == STATE_EOF &&
                    fs->in[i].after == EXT_STOP)
                    fs->eof = 1;
            }
        }
        if (fs->eof)
            fs->frame_ready = 0;
        /* An input still before its first frame with before == EXT_STOP
         * suppresses the event until it starts. */
        if (fs->frame_ready)
            for (i = 0; i < fs->nb_in; i++)
                if ((fs->in[i].state == STATE_BOF &&
                     fs->in[i].before == EXT_STOP))
                    fs->frame_ready = 0;
        fs->pts = pts;
    }
}
/* Estimate the timestamp at which the last frame of an input ends once
 * EOF is known: one tick past its pts. */
static int64_t framesync_pts_extrapolate(FFFrameSync *fs, unsigned in,
                                         int64_t pts)
{
    /* Possible enhancement: use the link's frame rate */
    return pts + 1;
}
/* Install "frame" (or EOF when NULL) as the pending next frame of input
 * "in". The frame's pts is rescaled into the common time base. On EOF
 * the input's sync value is cleared and the global sync level updated. */
static void framesync_inject_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
{
    int64_t pts;

    av_assert0(!fs->in[in].have_next);
    if (frame) {
        pts = av_rescale_q(frame->pts, fs->in[in].time_base, fs->time_base);
        frame->pts = pts;
    } else {
        /* EOF: if the stream never ran, or extends to infinity, it ends
         * at INT64_MAX; otherwise extrapolate just past its last frame. */
        pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
            ? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
        fs->in[in].sync = 0;
        framesync_sync_level_update(fs);
    }
    fs->in[in].frame_next = frame;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}
/* Hand a new frame (NULL for EOF) to input "in". If a pending "next"
 * frame already exists, the new one waits in the queue; otherwise it is
 * injected as the pending next frame immediately. */
int ff_framesync_add_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
{
    av_assert1(in < fs->nb_in);
    if (fs->in[in].have_next)
        ff_bufqueue_add(fs, &fs->in[in].queue, frame);
    else
        framesync_inject_frame(fs, in, frame);
    return 0;
}
/* Move queued frames into empty "next" slots, then try to advance to the
 * next frame event. Outcome is reported through fs->frame_ready,
 * fs->in_request and fs->eof. Must not be called while an unprocessed
 * event is pending (asserted). */
void ff_framesync_next(FFFrameSync *fs)
{
    unsigned i;

    av_assert0(!fs->frame_ready);
    for (i = 0; i < fs->nb_in; i++)
        if (!fs->in[i].have_next && fs->in[i].queue.available)
            framesync_inject_frame(fs, i, ff_bufqueue_get(&fs->in[i].queue));
    /* explicit reset: av_assert0 may be compiled out in release builds */
    fs->frame_ready = 0;
    framesync_advance(fs);
}
/* Discard the current frame event, allowing the next one to be prepared. */
void ff_framesync_drop(FFFrameSync *fs)
{
    fs->frame_ready = 0;
}
/* Return the current frame of input "in" in *rframe (NULL before the
 * first frame or after EOF). With get != 0, ownership passes to the
 * caller: the frame is either detached from the context or, if it may
 * still be needed for a later event, returned as a writable clone. */
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
                           unsigned get)
{
    AVFrame *frame;
    unsigned need_copy = 0, i;
    int64_t pts_next;
    int ret;

    if (!fs->in[in].frame) {
        *rframe = NULL;
        return 0;
    }
    frame = fs->in[in].frame;
    if (get) {
        /* Find out if we need to copy the frame: is there another sync
           stream, and do we know if its current frame will outlast this one? */
        pts_next = fs->in[in].have_next ? fs->in[in].pts_next : INT64_MAX;
        for (i = 0; i < fs->nb_in && !need_copy; i++)
            if (i != in && fs->in[i].sync &&
                (!fs->in[i].have_next || fs->in[i].pts_next < pts_next))
                need_copy = 1;
        if (need_copy) {
            /* keep the original for future events; give out a writable copy */
            if (!(frame = av_frame_clone(frame)))
                return AVERROR(ENOMEM);
            if ((ret = av_frame_make_writable(frame)) < 0) {
                av_frame_free(&frame);
                return ret;
            }
        } else {
            /* ownership transferred to the caller */
            fs->in[in].frame = NULL;
        }
        fs->frame_ready = 0;
    }
    *rframe = frame;
    return 0;
}
/* Release every frame and queued frame still owned by the context. */
void ff_framesync_uninit(FFFrameSync *fs)
{
    unsigned i;

    for (i = 0; i < fs->nb_in; i++) {
        av_frame_free(&fs->in[i].frame);
        av_frame_free(&fs->in[i].frame_next);
        ff_bufqueue_discard_all(&fs->in[i].queue);
    }
}
/* Run the on_event callback on ready frame events: all of them when
 * all != 0, at most one otherwise. Returns the number of events
 * processed, AVERROR_EOF when nothing was processed and output is
 * finished, or the callback's negative error code. */
int ff_framesync_process_frame(FFFrameSync *fs, unsigned all)
{
    int ret, count = 0;

    av_assert0(fs->on_event);
    while (1) {
        ff_framesync_next(fs);
        if (fs->eof || !fs->frame_ready)
            break;
        if ((ret = fs->on_event(fs)) < 0)
            return ret;
        ff_framesync_drop(fs);
        count++;
        if (!all)
            break;
    }
    if (!count && fs->eof)
        return AVERROR_EOF;
    return count;
}
/* Drop-in filter_frame() implementation for filters built on framesync:
 * flush events enabled by earlier frames, add the incoming frame on its
 * input, then process at most one event it may have completed. */
int ff_framesync_filter_frame(FFFrameSync *fs, AVFilterLink *inlink,
                              AVFrame *in)
{
    int ret;

    if ((ret = ff_framesync_process_frame(fs, 1)) < 0)
        return ret;
    if ((ret = ff_framesync_add_frame(fs, FF_INLINK_IDX(inlink), in)) < 0)
        return ret;
    if ((ret = ff_framesync_process_frame(fs, 0)) < 0)
        return ret;
    return 0;
}
/* Drop-in request_frame() implementation for single-output filters built
 * on framesync. */
int ff_framesync_request_frame(FFFrameSync *fs, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    int input, ret;

    /* If an event can already be produced, output it instead of
     * requesting more input. */
    if ((ret = ff_framesync_process_frame(fs, 0)) < 0)
        return ret;
    if (ret > 0)
        return 0;
    if (fs->eof)
        return AVERROR_EOF;
    outlink->flags |= FF_LINK_FLAG_REQUEST_LOOP;
    /* Request on the input framesync_advance() flagged as blocking. */
    input = fs->in_request;
    ret = ff_request_frame(ctx->inputs[input]);
    if (ret == AVERROR_EOF) {
        /* Inject EOF on that input and see whether it unblocks an event;
         * EOF on one input is not necessarily EOF for the output. */
        if ((ret = ff_framesync_add_frame(fs, input, NULL)) < 0)
            return ret;
        if ((ret = ff_framesync_process_frame(fs, 0)) < 0)
            return ret;
        ret = 0;
    }
    return ret;
}

296
libavfilter/framesync.h Normal file
View File

@ -0,0 +1,296 @@
/*
* Copyright (c) 2013 Nicolas George
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_FRAMESYNC_H
#define AVFILTER_FRAMESYNC_H
#include "bufferqueue.h"
/*
* TODO
* Callback-based API similar to dualinput.
* Export convenient options.
*/
/**
* This API is intended as a helper for filters that have several video
* inputs and need to combine them somehow. If the inputs have different or
* variable frame rate, getting the input frames to match requires a rather
* complex logic and a few user-tunable options.
*
* In this API, the moment when a set of synchronized input frames is ready
* to be processed is called a frame event. Frame events can be generated in
* response to input frames on any or all inputs and the handling of
* situations where some streams extend beyond the beginning or the end of
* others can be configured.
*
* The basic working of this API is the following:
*
* - When a frame is available on any input, add it using
* ff_framesync_add_frame().
*
* - When a frame event is ready to be processed (i.e. after adding a frame
* or when requested on input):
* - call ff_framesync_next();
* - if fs->frame_ready is true, process the frames;
* - call ff_framesync_drop().
*/
/**
* Stream extrapolation mode
*
* Describe how the frames of a stream are extrapolated before the first one
* and after EOF to keep sync with possibly longer other streams.
*/
enum FFFrameSyncExtMode {
/**
* Completely stop all streams with this one.
*/
EXT_STOP,
/**
* Ignore this stream and continue processing the other ones.
*/
EXT_NULL,
/**
* Extend the frame to infinity.
*/
EXT_INFINITY,
};
/**
* Input stream structure
*/
typedef struct FFFrameSyncIn {
/**
* Queue of incoming AVFrame, and NULL to mark EOF
*/
struct FFBufQueue queue;
/**
* Extrapolation mode for timestamps before the first frame
*/
enum FFFrameSyncExtMode before;
/**
* Extrapolation mode for timestamps after the last frame
*/
enum FFFrameSyncExtMode after;
/**
* Time base for the incoming frames
*/
AVRational time_base;
/**
* Current frame, may be NULL before the first one or after EOF
*/
AVFrame *frame;
/**
* Next frame, for internal use
*/
AVFrame *frame_next;
/**
* PTS of the current frame
*/
int64_t pts;
/**
* PTS of the next frame, for internal use
*/
int64_t pts_next;
/**
* Boolean flagging the next frame, for internal use
*/
uint8_t have_next;
/**
* State: before first, in stream or after EOF, for internal use
*/
uint8_t state;
/**
* Synchronization level: frames on input at the highest sync level will
* generate output frame events.
*
* For example, if inputs #0 and #1 have sync level 2 and input #2 has
* sync level 1, then a frame on either input #0 or #1 will generate a
* frame event, but not a frame on input #2 until both inputs #0 and #1
* have reached EOF.
*
* If sync is 0, no frame event will be generated.
*/
unsigned sync;
} FFFrameSyncIn;
/**
* Frame sync structure.
*/
typedef struct FFFrameSync {
const AVClass *class;
void *parent;
/**
* Number of input streams
*/
unsigned nb_in;
/**
* Time base for the output events
*/
AVRational time_base;
/**
* Timestamp of the current event
*/
int64_t pts;
/**
* Callback called when a frame event is ready
*/
int (*on_event)(struct FFFrameSync *fs);
/**
* Opaque pointer, not used by the API
*/
void *opaque;
/**
* Index of the input that requires a request
*/
unsigned in_request;
/**
* Synchronization level: only inputs with the same sync level are sync
* sources.
*/
unsigned sync_level;
/**
* Flag indicating that a frame event is ready
*/
uint8_t frame_ready;
/**
* Flag indicating that output has reached EOF.
*/
uint8_t eof;
/**
* Array of inputs; all inputs must be in consecutive memory
*/
FFFrameSyncIn in[1]; /* must be the last field */
} FFFrameSync;
/**
* Initialize a frame sync structure.
*
* The entire structure is expected to be already set to 0.
*
* @param fs frame sync structure to initialize
* @param parent parent object, used for logging
* @param nb_in number of inputs
*/
void ff_framesync_init(FFFrameSync *fs, void *parent, unsigned nb_in);
/**
* Configure a frame sync structure.
*
* Must be called after all options are set but before all use.
*
* @return >= 0 for success or a negative error code
*/
int ff_framesync_configure(FFFrameSync *fs);
/**
* Free all memory currently allocated.
*/
void ff_framesync_uninit(FFFrameSync *fs);
/**
* Add a frame to an input
*
* Typically called from the filter_frame() method.
*
* @param fs frame sync structure
* @param in index of the input
* @param frame input frame, or NULL for EOF
*/
int ff_framesync_add_frame(FFFrameSync *fs, unsigned in, AVFrame *frame);
/**
* Prepare the next frame event.
*
* The status of the operation can be found in fs->frame_ready and fs->eof.
*/
void ff_framesync_next(FFFrameSync *fs);
/**
* Drop the current frame event.
*/
void ff_framesync_drop(FFFrameSync *fs);
/**
* Get the current frame in an input.
*
* @param fs frame sync structure
* @param in index of the input
* @param rframe used to return the current frame (or NULL)
* @param get if not zero, the calling code needs to get ownership of
* the returned frame; the current frame will either be
* duplicated or removed from the framesync structure
*/
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
unsigned get);
/**
* Process one or several frame using the on_event callback.
*
* @return number of frames processed or negative error code
*/
int ff_framesync_process_frame(FFFrameSync *fs, unsigned all);
/**
* Accept a frame on a filter input.
*
* This function can be the complete implementation of all filter_frame
* methods of a filter using framesync.
*/
int ff_framesync_filter_frame(FFFrameSync *fs, AVFilterLink *inlink,
AVFrame *in);
/**
* Request a frame on the filter output.
*
* This function can be the complete implementation of the request_frame
* method of a filter using framesync if it has only one output.
*/
int ff_framesync_request_frame(FFFrameSync *fs, AVFilterLink *outlink);
#endif /* AVFILTER_FRAMESYNC_H */

View File

@ -368,6 +368,7 @@ static int config_output(AVFilterLink *outlink)
AVFilterLink *bottomlink = ctx->inputs[BOTTOM];
BlendContext *b = ctx->priv;
const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(toplink->format);
int ret;
if (toplink->format != bottomlink->format) {
av_log(ctx, AV_LOG_ERROR, "inputs must be of same pixel format\n");
@ -399,6 +400,9 @@ static int config_output(AVFilterLink *outlink)
b->vsub = pix_desc->log2_chroma_h;
b->nb_planes = av_pix_fmt_count_planes(toplink->format);
if ((ret = ff_dualinput_init(ctx, &b->dinput)) < 0)
return ret;
return 0;
}

View File

@ -656,10 +656,14 @@ static void update_clut(LUT3DContext *lut3d, const AVFrame *frame)
static int config_output(AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
LUT3DContext *lut3d = ctx->priv;
int ret;
outlink->w = ctx->inputs[0]->w;
outlink->h = ctx->inputs[0]->h;
outlink->time_base = ctx->inputs[0]->time_base;
if ((ret = ff_dualinput_init(ctx, &lut3d->dinput)) < 0)
return ret;
return 0;
}

View File

@ -309,6 +309,11 @@ static int config_input_overlay(AVFilterLink *inlink)
static int config_output(AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
OverlayContext *s = ctx->priv;
int ret;
if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
return ret;
outlink->w = ctx->inputs[MAIN]->w;
outlink->h = ctx->inputs[MAIN]->h;

View File

@ -305,13 +305,17 @@ static int config_input_ref(AVFilterLink *inlink)
static int config_output(AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
PSNRContext *s = ctx->priv;
AVFilterLink *mainlink = ctx->inputs[0];
int ret;
outlink->w = mainlink->w;
outlink->h = mainlink->h;
outlink->time_base = mainlink->time_base;
outlink->sample_aspect_ratio = mainlink->sample_aspect_ratio;
outlink->frame_rate = mainlink->frame_rate;
if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
return ret;
return 0;
}