
lavfi/framesync2: implement "activate" design.

With this helper API, filters that used the first framesync
helper can easily be converted to the new, more extensible
design in which a filter has a single activate() callback.
Nicolas George 2017-07-17 14:36:42 +02:00
parent ed1c884b9e
commit 4e0e9ce2dc
2 changed files with 82 additions and 161 deletions
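
As a rough sketch of the conversion the commit message describes (not part of this commit; BlendContext and the wiring below are hypothetical placeholders), a two-input filter that previously fed framesync from filter_frame() and request_frame() now only needs a single activate() callback that delegates to ff_framesync2_activate():

#include "avfilter.h"
#include "framesync2.h"
#include "internal.h"

typedef struct BlendContext {
    const AVClass *class;
    FFFrameSync fs;   /* framesync2 state; on_event is set at configure time */
} BlendContext;

static int activate(AVFilterContext *ctx)
{
    BlendContext *s = ctx->priv;
    /* framesync2 consumes frames and status from the inputs, requests more
     * input when needed, and calls s->fs.on_event for each frame event. */
    return ff_framesync2_activate(&s->fs);
}

In the AVFilter definition, .activate = activate replaces the per-pad filter_frame callbacks and the output pad's request_frame callback.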

libavfilter/framesync2.c

@@ -18,12 +18,9 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#define FF_INTERNAL_FIELDS 1
#include "framequeue.h"
#include "libavutil/avassert.h"
#include "avfilter.h"
#include "bufferqueue.h"
#include "filters.h"
#include "framesync2.h"
#include "internal.h"
@@ -49,8 +46,13 @@ enum {
STATE_EOF,
};
int ff_framesync2_init(FFFrameSync *fs, void *parent, unsigned nb_in)
int ff_framesync2_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
{
/* For filters with several outputs, we will not be able to assume which
output is relevant for ff_outlink_frame_wanted() and
ff_outlink_set_status(). To be designed when needed. */
av_assert0(parent->nb_outputs == 1);
fs->class = &framesync_class;
fs->parent = parent;
fs->nb_in = nb_in;
@@ -61,6 +63,13 @@ int ff_framesync2_init(FFFrameSync *fs, void *parent, unsigned nb_in)
return 0;
}
static void framesync_eof(FFFrameSync *fs)
{
fs->eof = 1;
fs->frame_ready = 0;
ff_outlink_set_status(fs->parent->outputs[0], AVERROR_EOF, AV_NOPTS_VALUE);
}
static void framesync_sync_level_update(FFFrameSync *fs)
{
unsigned i, level = 0;
@@ -74,7 +83,7 @@ static void framesync_sync_level_update(FFFrameSync *fs)
if (level)
fs->sync_level = level;
else
fs->eof = 1;
framesync_eof(fs);
}
int ff_framesync2_configure(FFFrameSync *fs)
@@ -144,7 +153,7 @@ static void framesync_advance(FFFrameSync *fs)
if (fs->in[i].pts_next < pts)
pts = fs->in[i].pts_next;
if (pts == INT64_MAX) {
fs->eof = 1;
framesync_eof(fs);
break;
}
for (i = 0; i < fs->nb_in; i++) {
@@ -162,11 +171,9 @@ static void framesync_advance(FFFrameSync *fs)
fs->frame_ready = 1;
if (fs->in[i].state == STATE_EOF &&
fs->in[i].after == EXT_STOP)
fs->eof = 1;
framesync_eof(fs);
}
}
if (fs->eof)
fs->frame_ready = 0;
if (fs->frame_ready)
for (i = 0; i < fs->nb_in; i++)
if ((fs->in[i].state == STATE_BOF &&
@@ -188,45 +195,24 @@ static void framesync_inject_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
int64_t pts;
av_assert0(!fs->in[in].have_next);
if (frame) {
av_assert0(frame);
pts = av_rescale_q(frame->pts, fs->in[in].time_base, fs->time_base);
frame->pts = pts;
} else {
pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
fs->in[in].sync = 0;
framesync_sync_level_update(fs);
}
fs->in[in].frame_next = frame;
fs->in[in].pts_next = pts;
fs->in[in].have_next = 1;
}
int ff_framesync2_add_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
static void framesync_inject_status(FFFrameSync *fs, unsigned in, int status, int64_t pts)
{
av_assert1(in < fs->nb_in);
if (!fs->in[in].have_next)
framesync_inject_frame(fs, in, frame);
else
ff_bufqueue_add(fs, &fs->in[in].queue, frame);
return 0;
}
void ff_framesync2_next(FFFrameSync *fs)
{
unsigned i;
av_assert0(!fs->frame_ready);
for (i = 0; i < fs->nb_in; i++)
if (!fs->in[i].have_next && fs->in[i].queue.available)
framesync_inject_frame(fs, i, ff_bufqueue_get(&fs->in[i].queue));
fs->frame_ready = 0;
framesync_advance(fs);
}
void ff_framesync2_drop(FFFrameSync *fs)
{
fs->frame_ready = 0;
av_assert0(!fs->in[in].have_next);
pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
fs->in[in].sync = 0;
framesync_sync_level_update(fs);
fs->in[in].frame_next = NULL;
fs->in[in].pts_next = pts;
fs->in[in].have_next = 1;
}
int ff_framesync2_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
@@ -273,71 +259,55 @@ void ff_framesync2_uninit(FFFrameSync *fs)
for (i = 0; i < fs->nb_in; i++) {
av_frame_free(&fs->in[i].frame);
av_frame_free(&fs->in[i].frame_next);
ff_bufqueue_discard_all(&fs->in[i].queue);
}
av_freep(&fs->in);
}
int ff_framesync2_process_frame(FFFrameSync *fs, unsigned all)
int ff_framesync2_activate(FFFrameSync *fs)
{
int ret, count = 0;
AVFilterContext *ctx = fs->parent;
AVFrame *frame = NULL;
int64_t pts;
unsigned i, nb_active, nb_miss;
int ret, status;
av_assert0(fs->on_event);
while (1) {
ff_framesync2_next(fs);
if (fs->eof || !fs->frame_ready)
break;
if ((ret = fs->on_event(fs)) < 0)
nb_active = nb_miss = 0;
for (i = 0; i < fs->nb_in; i++) {
if (fs->in[i].have_next || fs->in[i].state == STATE_EOF)
continue;
nb_active++;
ret = ff_inlink_consume_frame(ctx->inputs[i], &frame);
if (ret < 0)
return ret;
ff_framesync2_drop(fs);
count++;
if (!all)
break;
if (ret) {
av_assert0(frame);
framesync_inject_frame(fs, i, frame);
} else {
ret = ff_inlink_acknowledge_status(ctx->inputs[i], &status, &pts);
if (ret > 0) {
framesync_inject_status(fs, i, status, pts);
} else if (!ret) {
nb_miss++;
}
if (!count && fs->eof)
return AVERROR_EOF;
return count;
}
int ff_framesync2_filter_frame(FFFrameSync *fs, AVFilterLink *inlink,
AVFrame *in)
{
int ret;
if ((ret = ff_framesync2_process_frame(fs, 1)) < 0)
return ret;
if ((ret = ff_framesync2_add_frame(fs, FF_INLINK_IDX(inlink), in)) < 0)
return ret;
if ((ret = ff_framesync2_process_frame(fs, 0)) < 0)
return ret;
return 0;
}
int ff_framesync2_request_frame(FFFrameSync *fs, AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
int input, ret, i;
if ((ret = ff_framesync2_process_frame(fs, 0)) < 0)
return ret;
if (ret > 0)
return 0;
if (fs->eof)
return AVERROR_EOF;
input = fs->in_request;
/* Detect status change early */
}
}
if (nb_miss) {
if (nb_miss == nb_active && !ff_outlink_frame_wanted(ctx->outputs[0]))
return FFERROR_NOT_READY;
for (i = 0; i < fs->nb_in; i++)
if (!ff_framequeue_queued_frames(&ctx->inputs[i]->fifo) &&
ctx->inputs[i]->status_in && !ctx->inputs[i]->status_out)
input = i;
ret = ff_request_frame(ctx->inputs[input]);
if (ret == AVERROR_EOF) {
if ((ret = ff_framesync2_add_frame(fs, input, NULL)) < 0)
return ret;
if ((ret = ff_framesync2_process_frame(fs, 0)) < 0)
return ret;
ret = 0;
if (!fs->in[i].have_next && fs->in[i].state != STATE_EOF)
ff_inlink_request_frame(ctx->inputs[i]);
return 0;
}
framesync_advance(fs);
if (fs->eof || !fs->frame_ready)
return 0;
ret = fs->on_event(fs);
if (ret < 0)
return ret;
fs->frame_ready = 0;
return 0;
}
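
For context only (this is not part of the diff above): with ff_framesync2_init() now taking the AVFilterContext itself as parent, the configuration side of such a hypothetical two-input filter might look roughly like this; BlendContext and do_blend are placeholder names and the EXT_* choices are just one plausible setup.

static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    BlendContext *s = ctx->priv;
    FFFrameSyncIn *in;
    int ret;

    /* two inputs; the parent is the filter context, as required above */
    if ((ret = ff_framesync2_init(&s->fs, ctx, 2)) < 0)
        return ret;
    in = s->fs.in;
    in[0].time_base = ctx->inputs[0]->time_base;
    in[1].time_base = ctx->inputs[1]->time_base;
    in[0].sync   = 2;              /* the first input drives the events */
    in[0].before = EXT_STOP;
    in[0].after  = EXT_INFINITY;
    in[1].sync   = 1;
    in[1].before = EXT_NULL;
    in[1].after  = EXT_NULL;
    s->fs.opaque   = s;
    s->fs.on_event = do_blend;     /* called for each frame event */

    return ff_framesync2_configure(&s->fs);
}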

libavfilter/framesync2.h

@@ -25,7 +25,6 @@
/*
* TODO
* Callback-based API similar to dualinput.
* Export convenient options.
*/
@@ -41,16 +40,9 @@
* situations where some streams extend beyond the beginning or the end of
* others can be configured.
*
* The basic working of this API is the following:
*
* - When a frame is available on any input, add it using
* ff_framesync2_add_frame().
*
* - When a frame event is ready to be processed (i.e. after adding a frame
* or when requested on input):
* - call ff_framesync2_next();
* - if fs->frame_ready is true, process the frames;
* - call ff_framesync2_drop().
* The basic working of this API is the following: set the on_event
* callback, then call ff_framesync2_activate() from the filter's activate
* callback.
*/
/**
@@ -82,11 +74,6 @@ enum FFFrameSyncExtMode {
*/
typedef struct FFFrameSyncIn {
/**
* Queue of incoming AVFrame, and NULL to mark EOF
*/
struct FFBufQueue queue;
/**
* Extrapolation mode for timestamps before the first frame
*/
@@ -152,7 +139,11 @@ typedef struct FFFrameSyncIn {
*/
typedef struct FFFrameSync {
const AVClass *class;
void *parent;
/**
* Parent filter context.
*/
AVFilterContext *parent;
/**
* Number of input streams
@@ -213,11 +204,11 @@ typedef struct FFFrameSync {
* The entire structure is expected to be already set to 0.
*
* @param fs frame sync structure to initialize
* @param parent parent object, used for logging
* @param parent parent AVFilterContext object
* @param nb_in number of inputs
* @return >= 0 for success or a negative error code
*/
int ff_framesync2_init(FFFrameSync *fs, void *parent, unsigned nb_in);
int ff_framesync2_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in);
/**
* Configure a frame sync structure.
@@ -233,29 +224,6 @@ int ff_framesync2_configure(FFFrameSync *fs);
*/
void ff_framesync2_uninit(FFFrameSync *fs);
/**
* Add a frame to an input
*
* Typically called from the filter_frame() method.
*
* @param fs frame sync structure
* @param in index of the input
* @param frame input frame, or NULL for EOF
*/
int ff_framesync2_add_frame(FFFrameSync *fs, unsigned in, AVFrame *frame);
/**
* Prepare the next frame event.
*
* The status of the operation can be found in fs->frame_ready and fs->eof.
*/
void ff_framesync2_next(FFFrameSync *fs);
/**
* Drop the current frame event.
*/
void ff_framesync2_drop(FFFrameSync *fs);
/**
* Get the current frame in an input.
*
@@ -270,28 +238,11 @@ int ff_framesync2_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
unsigned get);
/**
* Process one or several frame using the on_event callback.
* Examine the frames in the filter's input and try to produce output.
*
* @return number of frames processed or negative error code
* This function can be the complete implementation of the activate
* method of a filter using framesync2.
*/
int ff_framesync2_process_frame(FFFrameSync *fs, unsigned all);
/**
* Accept a frame on a filter input.
*
* This function can be the complete implementation of all filter_frame
* methods of a filter using framesync.
*/
int ff_framesync2_filter_frame(FFFrameSync *fs, AVFilterLink *inlink,
AVFrame *in);
/**
* Request a frame on the filter output.
*
* This function can be the complete implementation of all filter_frame
* methods of a filter using framesync if it has only one output.
*/
int ff_framesync2_request_frame(FFFrameSync *fs, AVFilterLink *outlink);
int ff_framesync2_activate(FFFrameSync *fs);
#endif /* AVFILTER_FRAMESYNC2_H */
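
Finally, a hypothetical sketch of the on_event callback itself (do_blend above), which retrieves the current frame on each input with ff_framesync2_get_frame() and pushes one frame downstream; the actual processing is elided and error handling is minimal:

static int do_blend(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *main_frame = NULL, *second_frame = NULL;
    int ret;

    /* get = 1 transfers ownership of the frame to the caller,
     * get = 0 only borrows the reference kept by framesync2 */
    if ((ret = ff_framesync2_get_frame(fs, 0, &main_frame, 1)) < 0)
        return ret;
    if ((ret = ff_framesync2_get_frame(fs, 1, &second_frame, 0)) < 0)
        return ret;

    /* second_frame may be NULL depending on the EXT_* configuration;
     * ... combine second_frame into main_frame here ... */

    main_frame->pts = av_rescale_q(fs->pts, fs->time_base,
                                   ctx->outputs[0]->time_base);
    return ff_filter_frame(ctx->outputs[0], main_frame);
}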