
Merge commit 'bd49be885e9ad6bae599c54473ba2fa2957eb140'

* commit 'bd49be885e9ad6bae599c54473ba2fa2957eb140':
  avconv_vdpau: use the hwcontext API to simplify code

Tested-by: wm4
Merged-by: Derek Buitenhuis <derek.buitenhuis@gmail.com>
Derek Buitenhuis 2016-02-17 16:46:44 +00:00
commit 6b706ce85f
3 changed files with 91 additions and 223 deletions
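
For context, the change drops the hand-rolled VdpVideoSurface management in favour of the generic libavutil hwcontext API. Below is a minimal sketch of that pattern, not code from the tree: it assumes a VdpDevice and VdpGetProcAddress pointer already obtained from vdp_device_create_x11() (as vdpau_alloc() does), and the helper name wrap_vdpau_device() is purely illustrative.

#include <vdpau/vdpau.h>

#include "libavcodec/avcodec.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vdpau.h"
#include "libavutil/pixfmt.h"

/* Hypothetical helper (not in the tree): wrap an existing VDPAU device in an
 * AVHWDeviceContext and derive an AVHWFramesContext sized for the decoder.
 * Error handling is collapsed for brevity. */
static AVBufferRef *wrap_vdpau_device(VdpDevice device,
                                      VdpGetProcAddress *get_proc_address,
                                      const AVCodecContext *avctx)
{
    AVBufferRef *device_ref, *frames_ref = NULL;
    AVHWDeviceContext    *device_ctx;
    AVVDPAUDeviceContext *device_hwctx;
    AVHWFramesContext    *frames_ctx;

    /* Wrap the caller's VdpDevice/get_proc_address in a device context. */
    device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VDPAU);
    if (!device_ref)
        return NULL;
    device_ctx                     = (AVHWDeviceContext*)device_ref->data;
    device_hwctx                   = device_ctx->hwctx;
    device_hwctx->device           = device;
    device_hwctx->get_proc_address = get_proc_address;
    if (av_hwdevice_ctx_init(device_ref) < 0)
        goto done;

    /* Describe the surface pool; the frames context now owns the
     * VdpVideoSurface allocation that used to be done by hand. */
    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref)
        goto done;
    frames_ctx            = (AVHWFramesContext*)frames_ref->data;
    frames_ctx->format    = AV_PIX_FMT_VDPAU;
    frames_ctx->sw_format = avctx->sw_pix_fmt;
    frames_ctx->width     = avctx->coded_width;
    frames_ctx->height    = avctx->coded_height;
    if (av_hwframe_ctx_init(frames_ref) < 0)
        av_buffer_unref(&frames_ref);

done:
    av_buffer_unref(&device_ref);   /* the frames context keeps its own reference */
    return frames_ref;
}

Frames are then allocated from the pool with av_hwframe_get_buffer() and copied back to system memory with av_hwframe_transfer_data(), which is what the new vdpau_get_buffer() and vdpau_retrieve_data() in the diff below reduce to.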

@@ -538,7 +538,6 @@ extern int stdin_interaction;
 extern int frame_bits_per_raw_sample;
 extern AVIOContext *progress_avio;
 extern float max_error_rate;
-extern int vdpau_api_ver;
 extern char *videotoolbox_pixfmt;
 
 extern const AVIOInterruptCB int_cb;

@@ -3361,9 +3361,6 @@ const OptionDef options[] = {
     { "hwaccel_device",   OPT_VIDEO | OPT_STRING | HAS_ARG | OPT_EXPERT |
                           OPT_SPEC | OPT_INPUT, { .off = OFFSET(hwaccel_devices) },
         "select a device for HW acceleration", "devicename" },
-#if HAVE_VDPAU_X11
-    { "vdpau_api_ver", HAS_ARG | OPT_INT | OPT_EXPERT, { &vdpau_api_ver }, "" },
-#endif
 #if CONFIG_VDA || CONFIG_VIDEOTOOLBOX
     { "videotoolbox_pixfmt", HAS_ARG | OPT_STRING | OPT_EXPERT, { &videotoolbox_pixfmt}, "" },
 #endif

@@ -30,36 +30,31 @@
 #include "libavutil/avassert.h"
 #include "libavutil/buffer.h"
 #include "libavutil/frame.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_vdpau.h"
 #include "libavutil/pixfmt.h"
 
 typedef struct VDPAUContext {
-    Display *dpy;
-
-    VdpDevice  device;
-    VdpDecoder decoder;
-    VdpGetProcAddress *get_proc_address;
-
-    VdpGetErrorString                               *get_error_string;
-    VdpGetInformationString                         *get_information_string;
-    VdpDeviceDestroy                                *device_destroy;
-#if 1 // for ffmpegs older vdpau API, not the oldest though
-    VdpDecoderCreate                                *decoder_create;
-    VdpDecoderDestroy                               *decoder_destroy;
-    VdpDecoderRender                                *decoder_render;
-#endif
-    VdpVideoSurfaceCreate                           *video_surface_create;
-    VdpVideoSurfaceDestroy                          *video_surface_destroy;
-    VdpVideoSurfaceGetBitsYCbCr                     *video_surface_get_bits;
-    VdpVideoSurfaceGetParameters                    *video_surface_get_parameters;
-    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *video_surface_query;
-
+    AVBufferRef *hw_frames_ctx;
     AVFrame *tmp_frame;
-
-    enum AVPixelFormat pix_fmt;
-    VdpYCbCrFormat vdpau_format;
 } VDPAUContext;
 
-int vdpau_api_ver = 2;
+typedef struct VDPAUHWDevicePriv {
+    VdpDeviceDestroy *device_destroy;
+    Display *dpy;
+} VDPAUHWDevicePriv;
+
+static void device_free(AVHWDeviceContext *ctx)
+{
+    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
+    VDPAUHWDevicePriv    *priv  = ctx->user_opaque;
+
+    if (priv->device_destroy)
+        priv->device_destroy(hwctx->device);
+    if (priv->dpy)
+        XCloseDisplay(priv->dpy);
+    av_freep(&priv);
+}
 
 static void vdpau_uninit(AVCodecContext *s)
 {
@@ -70,131 +65,43 @@ static void vdpau_uninit(AVCodecContext *s)
     ist->hwaccel_get_buffer    = NULL;
     ist->hwaccel_retrieve_data = NULL;
 
-    if (ctx->decoder_destroy)
-        ctx->decoder_destroy(ctx->decoder);
-
-    if (ctx->device_destroy)
-        ctx->device_destroy(ctx->device);
-
-    if (ctx->dpy)
-        XCloseDisplay(ctx->dpy);
-
+    av_buffer_unref(&ctx->hw_frames_ctx);
     av_frame_free(&ctx->tmp_frame);
 
     av_freep(&ist->hwaccel_ctx);
     av_freep(&s->hwaccel_context);
 }
 
-static void vdpau_release_buffer(void *opaque, uint8_t *data)
-{
-    VdpVideoSurface surface = *(VdpVideoSurface*)data;
-    VDPAUContext *ctx = opaque;
-
-    ctx->video_surface_destroy(surface);
-    av_freep(&data);
-}
-
 static int vdpau_get_buffer(AVCodecContext *s, AVFrame *frame, int flags)
 {
     InputStream         *ist = s->opaque;
     VDPAUContext        *ctx = ist->hwaccel_ctx;
-    VdpVideoSurface *surface;
-    VdpStatus err;
-    VdpChromaType chroma;
-    uint32_t width, height;
 
-    av_assert0(frame->format == AV_PIX_FMT_VDPAU);
-
-    if (av_vdpau_get_surface_parameters(s, &chroma, &width, &height))
-        return AVERROR(ENOSYS);
-
-    surface = av_malloc(sizeof(*surface));
-    if (!surface)
-        return AVERROR(ENOMEM);
-
-    frame->buf[0] = av_buffer_create((uint8_t*)surface, sizeof(*surface),
-                                     vdpau_release_buffer, ctx,
-                                     AV_BUFFER_FLAG_READONLY);
-    if (!frame->buf[0]) {
-        av_freep(&surface);
-        return AVERROR(ENOMEM);
-    }
-
-    // properly we should keep a pool of surfaces instead of creating
-    // them anew for each frame, but since we don't care about speed
-    // much in this code, we don't bother
-    err = ctx->video_surface_create(ctx->device, chroma, width, height,
-                                    surface);
-    if (err != VDP_STATUS_OK) {
-        av_log(NULL, AV_LOG_ERROR, "Error allocating a VDPAU video surface: %s\n",
-               ctx->get_error_string(err));
-        av_buffer_unref(&frame->buf[0]);
-        return AVERROR_UNKNOWN;
-    }
-    frame->data[3] = (uint8_t*)(uintptr_t)*surface;
-
-    return 0;
+    return av_hwframe_get_buffer(ctx->hw_frames_ctx, frame, 0);
 }
 
 static int vdpau_retrieve_data(AVCodecContext *s, AVFrame *frame)
 {
-    VdpVideoSurface surface = (VdpVideoSurface)(uintptr_t)frame->data[3];
     InputStream        *ist = s->opaque;
     VDPAUContext       *ctx = ist->hwaccel_ctx;
-    VdpStatus err;
-    int ret, chroma_type;
+    int ret;
 
-    err = ctx->video_surface_get_parameters(surface, &chroma_type,
-                                            &ctx->tmp_frame->width,
-                                            &ctx->tmp_frame->height);
-    if (err != VDP_STATUS_OK) {
-        av_log(NULL, AV_LOG_ERROR, "Error getting surface parameters: %s\n",
-               ctx->get_error_string(err));
-        return AVERROR_UNKNOWN;
-    }
-    ctx->tmp_frame->format = ctx->pix_fmt;
-
-    ret = av_frame_get_buffer(ctx->tmp_frame, 32);
+    ret = av_hwframe_transfer_data(ctx->tmp_frame, frame, 0);
     if (ret < 0)
         return ret;
 
-    ctx->tmp_frame->width  = frame->width;
-    ctx->tmp_frame->height = frame->height;
-
-    err = ctx->video_surface_get_bits(surface, ctx->vdpau_format,
-                                      (void * const *)ctx->tmp_frame->data,
-                                      ctx->tmp_frame->linesize);
-    if (err != VDP_STATUS_OK) {
-        av_log(NULL, AV_LOG_ERROR, "Error retrieving frame data from VDPAU: %s\n",
-               ctx->get_error_string(err));
-        ret = AVERROR_UNKNOWN;
-        goto fail;
-    }
-
-    if (ctx->vdpau_format == VDP_YCBCR_FORMAT_YV12)
-        FFSWAP(uint8_t*, ctx->tmp_frame->data[1], ctx->tmp_frame->data[2]);
-
     ret = av_frame_copy_props(ctx->tmp_frame, frame);
-    if (ret < 0)
-        goto fail;
+    if (ret < 0) {
+        av_frame_unref(ctx->tmp_frame);
+        return ret;
+    }
 
     av_frame_unref(frame);
     av_frame_move_ref(frame, ctx->tmp_frame);
 
     return 0;
-
-fail:
-    av_frame_unref(ctx->tmp_frame);
-    return ret;
 }
 
-static const int vdpau_formats[][2] = {
-    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
-    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
-    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
-    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
-};
-
 static int vdpau_alloc(AVCodecContext *s)
 {
     InputStream  *ist = s->opaque;
@@ -203,12 +110,26 @@ static int vdpau_alloc(AVCodecContext *s)
     VDPAUContext *ctx;
     const char *display, *vendor;
     VdpStatus err;
-    int i;
+    int ret;
+
+    VdpDevice                device;
+    VdpGetProcAddress       *get_proc_address;
+    VdpGetInformationString *get_information_string;
+
+    VDPAUHWDevicePriv    *device_priv = NULL;
+    AVBufferRef          *device_ref  = NULL;
+    AVHWDeviceContext    *device_ctx;
+    AVVDPAUDeviceContext *device_hwctx;
+    AVHWFramesContext    *frames_ctx;
 
     ctx = av_mallocz(sizeof(*ctx));
     if (!ctx)
         return AVERROR(ENOMEM);
 
+    device_priv = av_mallocz(sizeof(*device_priv));
+    if (!device_priv)
+        goto fail;
+
     ist->hwaccel_ctx           = ctx;
     ist->hwaccel_uninit        = vdpau_uninit;
     ist->hwaccel_get_buffer    = vdpau_get_buffer;
@@ -218,16 +139,16 @@ static int vdpau_alloc(AVCodecContext *s)
     if (!ctx->tmp_frame)
         goto fail;
 
-    ctx->dpy = XOpenDisplay(ist->hwaccel_device);
-    if (!ctx->dpy) {
+    device_priv->dpy = XOpenDisplay(ist->hwaccel_device);
+    if (!device_priv->dpy) {
         av_log(NULL, loglevel, "Cannot open the X11 display %s.\n",
                XDisplayName(ist->hwaccel_device));
         goto fail;
     }
-    display = XDisplayString(ctx->dpy);
+    display = XDisplayString(device_priv->dpy);
 
-    err = vdp_device_create_x11(ctx->dpy, XDefaultScreen(ctx->dpy), &ctx->device,
-                                &ctx->get_proc_address);
+    err = vdp_device_create_x11(device_priv->dpy, XDefaultScreen(device_priv->dpy),
+                                &device, &get_proc_address);
     if (err != VDP_STATUS_OK) {
         av_log(NULL, loglevel, "VDPAU device creation on X11 display %s failed.\n",
                display);
@@ -237,63 +158,52 @@
 #define GET_CALLBACK(id, result)                                                \
 do {                                                                            \
     void *tmp;                                                                  \
-    err = ctx->get_proc_address(ctx->device, id, &tmp);                         \
+    err = get_proc_address(device, id, &tmp);                                   \
     if (err != VDP_STATUS_OK) {                                                 \
         av_log(NULL, loglevel, "Error getting the " #id " callback.\n");        \
         goto fail;                                                              \
     }                                                                           \
-    ctx->result = tmp;                                                          \
+    result = tmp;                                                               \
 } while (0)
 
-    GET_CALLBACK(VDP_FUNC_ID_GET_ERROR_STRING, get_error_string);
-    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
-    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, device_destroy);
-    if (vdpau_api_ver == 1) {
-        GET_CALLBACK(VDP_FUNC_ID_DECODER_CREATE, decoder_create);
-        GET_CALLBACK(VDP_FUNC_ID_DECODER_DESTROY, decoder_destroy);
-        GET_CALLBACK(VDP_FUNC_ID_DECODER_RENDER, decoder_render);
-    }
-    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, video_surface_create);
-    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, video_surface_destroy);
-    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, video_surface_get_bits);
-    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_PARAMETERS, video_surface_get_parameters);
-    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
-                 video_surface_query);
+    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
+    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         device_priv->device_destroy);
 
-    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_formats); i++) {
-        VdpBool supported;
-        err = ctx->video_surface_query(ctx->device, VDP_CHROMA_TYPE_420,
-                                       vdpau_formats[i][0], &supported);
-        if (err != VDP_STATUS_OK) {
-            av_log(NULL, loglevel,
-                   "Error querying VDPAU surface capabilities: %s\n",
-                   ctx->get_error_string(err));
-            goto fail;
-        }
-        if (supported)
-            break;
-    }
-    if (i == FF_ARRAY_ELEMS(vdpau_formats)) {
-        av_log(NULL, loglevel,
-               "No supported VDPAU format for retrieving the data.\n");
-        return AVERROR(EINVAL);
-    }
-    ctx->vdpau_format = vdpau_formats[i][0];
-    ctx->pix_fmt      = vdpau_formats[i][1];
+    device_ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VDPAU);
+    if (!device_ref)
+        goto fail;
+    device_ctx                     = (AVHWDeviceContext*)device_ref->data;
+    device_hwctx                   = device_ctx->hwctx;
+    device_ctx->user_opaque        = device_priv;
+    device_ctx->free               = device_free;
+    device_hwctx->device           = device;
+    device_hwctx->get_proc_address = get_proc_address;
 
-    if (vdpau_api_ver == 1) {
-        vdpau_ctx = av_vdpau_alloc_context();
-        if (!vdpau_ctx)
-            goto fail;
-        vdpau_ctx->render = ctx->decoder_render;
+    device_priv = NULL;
 
-        s->hwaccel_context = vdpau_ctx;
-    } else
-        if (av_vdpau_bind_context(s, ctx->device, ctx->get_proc_address,
-                                  AV_HWACCEL_FLAG_IGNORE_LEVEL))
-            goto fail;
+    ret = av_hwdevice_ctx_init(device_ref);
+    if (ret < 0)
+        goto fail;
+
+    ctx->hw_frames_ctx = av_hwframe_ctx_alloc(device_ref);
+    if (!ctx->hw_frames_ctx)
+        goto fail;
+    av_buffer_unref(&device_ref);
+
+    frames_ctx            = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
+    frames_ctx->format    = AV_PIX_FMT_VDPAU;
+    frames_ctx->sw_format = s->sw_pix_fmt;
+    frames_ctx->width     = s->coded_width;
+    frames_ctx->height    = s->coded_height;
+
+    ret = av_hwframe_ctx_init(ctx->hw_frames_ctx);
+    if (ret < 0)
+        goto fail;
+
+    if (av_vdpau_bind_context(s, device, get_proc_address, 0))
+        goto fail;
 
-    ctx->get_information_string(&vendor);
+    get_information_string(&vendor);
     av_log(NULL, AV_LOG_VERBOSE, "Using VDPAU -- %s -- on X11 display %s, "
            "to decode input stream #%d:%d.\n", vendor,
            display, ist->file_index, ist->st->index);
@@ -303,60 +213,22 @@ do {
 fail:
     av_log(NULL, loglevel, "VDPAU init failed for stream #%d:%d.\n",
            ist->file_index, ist->st->index);
+    if (device_priv) {
+        if (device_priv->device_destroy)
+            device_priv->device_destroy(device);
+        if (device_priv->dpy)
+            XCloseDisplay(device_priv->dpy);
+    }
+    av_freep(&device_priv);
+    av_buffer_unref(&device_ref);
     vdpau_uninit(s);
     return AVERROR(EINVAL);
 }
 
-static int vdpau_old_init(AVCodecContext *s)
-{
-    InputStream *ist = s->opaque;
-    int loglevel = (ist->hwaccel_id == HWACCEL_AUTO) ? AV_LOG_VERBOSE : AV_LOG_ERROR;
-    AVVDPAUContext *vdpau_ctx;
-    VDPAUContext *ctx;
-    VdpStatus err;
-    int profile, ret;
-
-    if (!ist->hwaccel_ctx) {
-        ret = vdpau_alloc(s);
-        if (ret < 0)
-            return ret;
-    }
-    ctx = ist->hwaccel_ctx;
-    vdpau_ctx = s->hwaccel_context;
-
-    ret = av_vdpau_get_profile(s, &profile);
-    if (ret < 0) {
-        av_log(NULL, loglevel, "No known VDPAU decoder profile for this stream.\n");
-        return AVERROR(EINVAL);
-    }
-
-    if (ctx->decoder)
-        ctx->decoder_destroy(ctx->decoder);
-
-    err = ctx->decoder_create(ctx->device, profile,
-                              s->coded_width, s->coded_height,
-                              16, &ctx->decoder);
-    if (err != VDP_STATUS_OK) {
-        av_log(NULL, loglevel, "Error creating the VDPAU decoder: %s\n",
-               ctx->get_error_string(err));
-        return AVERROR_UNKNOWN;
-    }
-
-    vdpau_ctx->decoder = ctx->decoder;
-
-    ist->hwaccel_get_buffer    = vdpau_get_buffer;
-    ist->hwaccel_retrieve_data = vdpau_retrieve_data;
-
-    return 0;
-}
-
 int vdpau_init(AVCodecContext *s)
 {
     InputStream *ist = s->opaque;
 
-    if (vdpau_api_ver == 1)
-        return vdpau_old_init(s);
-
     if (!ist->hwaccel_ctx) {
         int ret = vdpau_alloc(s);
         if (ret < 0)