
lavfi: Use ff_get_video_buffer in all filters using hwframes

commit 7e2561fa83
parent 7433feb82f
Author: Mark Thompson
Date:   2016-10-31 22:14:10 +00:00

5 changed files with 11 additions and 31 deletions
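Each hunk below makes the same substitution: the manual av_frame_alloc() + av_hwframe_get_buffer() pair is replaced by a single ff_get_video_buffer() call on the filter's output link, so frame allocation goes through lavfi's normal get_video_buffer path instead of a frames context hard-coded in the filter. As a rough sketch (not code from this commit), the resulting pattern in a generic hardware filter looks like the following; the function name and the elided processing step are illustrative assumptions:

#include "avfilter.h"
#include "internal.h"
#include "video.h"

/* Sketch of a filter_frame callback using the allocation pattern adopted
 * here: the output AVFrame is requested from the output link rather than
 * being allocated manually and bound to a frames context by the filter.
 * The hardware processing step is elided. */
static int example_filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    AVFilterContext *avctx   = inlink->dst;
    AVFilterLink    *outlink = avctx->outputs[0];
    AVFrame *out;
    int ret;

    /* Allocation goes through ff_get_video_buffer(), so the buffer source
     * is decided by the output link rather than by this filter. */
    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    /* ... run the hardware operation into the surface behind out->data[3] ... */

    ret = av_frame_copy_props(out, in);
    if (ret < 0)
        goto fail;

    av_frame_free(&in);
    return ff_filter_frame(outlink, out);

fail:
    av_frame_free(&in);
    av_frame_free(&out);
    return ret;
}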


@@ -433,13 +433,11 @@ static int process_frame(AVFilterContext *ctx, const AVFrame *in,
     mfxStatus err;
     int ret, again = 0;
 
-    out = av_frame_alloc();
-    if (!out)
-        return AVERROR(ENOMEM);
-
-    ret = av_hwframe_get_buffer(s->hw_frames_ctx, out, 0);
-    if (ret < 0)
+    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
+    if (!out) {
+        ret = AVERROR(ENOMEM);
         goto fail;
+    }
 
     surf_out = (mfxFrameSurface1*)out->data[3];
     surf_out->Info.CropW = outlink->w;


@@ -161,15 +161,10 @@ static int hwupload_filter_frame(AVFilterLink *link, AVFrame *input)
     if (input->format == outlink->format)
         return ff_filter_frame(outlink, input);
 
-    output = av_frame_alloc();
+    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
     if (!output) {
-        err = AVERROR(ENOMEM);
-        goto fail;
-    }
-
-    err = av_hwframe_get_buffer(ctx->hwframes_ref, output, 0);
-    if (err < 0) {
         av_log(ctx, AV_LOG_ERROR, "Failed to allocate frame to upload to.\n");
+        err = AVERROR(ENOMEM);
         goto fail;
     }
 


@@ -156,16 +156,12 @@ static int cudaupload_filter_frame(AVFilterLink *link, AVFrame *in)
     AVFrame *out = NULL;
     int ret;
 
-    out = av_frame_alloc();
+    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
     if (!out) {
         ret = AVERROR(ENOMEM);
         goto fail;
     }
 
-    ret = av_hwframe_get_buffer(s->hwframe, out, 0);
-    if (ret < 0)
-        goto fail;
-
     out->width  = in->width;
     out->height = in->height;
 


@@ -528,16 +528,12 @@ static int qsvscale_filter_frame(AVFilterLink *link, AVFrame *in)
     AVFrame *out = NULL;
     int ret = 0;
 
-    out = av_frame_alloc();
+    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
     if (!out) {
         ret = AVERROR(ENOMEM);
         goto fail;
     }
 
-    ret = av_hwframe_get_buffer(s->out_frames_ref, out, 0);
-    if (ret < 0)
-        goto fail;
-
     do {
         err = MFXVideoVPP_RunFrameVPPAsync(s->session,
                                            (mfxFrameSurface1*)in->data[3],


@@ -31,6 +31,7 @@
 #include "avfilter.h"
 #include "formats.h"
 #include "internal.h"
+#include "video.h"
 
 typedef struct ScaleVAAPIContext {
     const AVClass *class;
@@ -274,19 +275,13 @@ static int scale_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
     av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale input.\n",
            input_surface);
 
-    output_frame = av_frame_alloc();
+    output_frame = ff_get_video_buffer(outlink, ctx->output_width,
+                                       ctx->output_height);
     if (!output_frame) {
-        av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame.");
         err = AVERROR(ENOMEM);
         goto fail;
     }
 
-    err = av_hwframe_get_buffer(ctx->output_frames_ref, output_frame, 0);
-    if (err < 0) {
-        av_log(ctx, AV_LOG_ERROR, "Failed to get surface for "
-               "output: %d\n.", err);
-    }
-
     output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
     av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale output.\n",
            output_surface);